[ 578.120512] env[69982]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69982) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 578.120940] env[69982]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69982) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 578.120940] env[69982]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69982) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 578.121324] env[69982]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 578.228407] env[69982]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69982) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 578.238915] env[69982]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=69982) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 578.283796] env[69982]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 578.847051] env[69982]: INFO nova.virt.driver [None req-5627ec4d-1ab3-472d-9592-e53febb5bbb2 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 578.920997] env[69982]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 578.921240] env[69982]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 578.921311] env[69982]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69982) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 582.319010] env[69982]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-66a282a1-b351-4e09-8083-0e543752b4df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.335175] env[69982]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69982) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 582.335388] env[69982]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-0e1efe1e-bd56-4011-9cce-c5eef79d3b25 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.368293] env[69982]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e0f61.
[ 582.368535] env[69982]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.447s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 582.369040] env[69982]: INFO nova.virt.vmwareapi.driver [None req-5627ec4d-1ab3-472d-9592-e53febb5bbb2 None None] VMware vCenter version: 7.0.3
[ 582.372524] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479f594f-a1c8-4ea3-9215-f213dea0116e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.394897] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa2edc5-74d2-47f0-a526-71a5a3d6390b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.401547] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747b834b-db4c-49a8-a1a3-4a1f68ecd92f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.408768] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b1e544-8cfb-4284-81e6-ac7aabd14427 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.423141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8be819-6e28-4fa4-a139-704629a2d7de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.430032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2e0415-60cd-40b7-8c01-b2c35ae589b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.461311] env[69982]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-32dbe1c9-0320-4bf6-a4ec-1c14e5d4ef17 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 582.467245] env[69982]: DEBUG nova.virt.vmwareapi.driver [None req-5627ec4d-1ab3-472d-9592-e53febb5bbb2 None None] Extension org.openstack.compute already exists. {{(pid=69982) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 582.469920] env[69982]: INFO nova.compute.provider_config [None req-5627ec4d-1ab3-472d-9592-e53febb5bbb2 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 582.973041] env[69982]: DEBUG nova.context [None req-5627ec4d-1ab3-472d-9592-e53febb5bbb2 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),5b8ab85c-1108-41ed-a555-352a3350d6fd(cell1) {{(pid=69982) load_cells /opt/stack/nova/nova/context.py:464}}
[ 582.975154] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 582.975393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 582.976157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 582.976611] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Acquiring lock "5b8ab85c-1108-41ed-a555-352a3350d6fd" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 582.976813] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Lock "5b8ab85c-1108-41ed-a555-352a3350d6fd" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 582.977943] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Lock "5b8ab85c-1108-41ed-a555-352a3350d6fd" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 582.998672] env[69982]: INFO dbcounter [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Registered counter for database nova_cell0
[ 583.006877] env[69982]: INFO dbcounter [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Registered counter for database nova_cell1
[ 583.010234] env[69982]: DEBUG oslo_db.sqlalchemy.engines [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69982) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 583.010606] env[69982]: DEBUG oslo_db.sqlalchemy.engines [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69982) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 583.015531] env[69982]: ERROR nova.db.main.api [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 583.015531] env[69982]: result = function(*args, **kwargs)
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 583.015531] env[69982]: return func(*args, **kwargs)
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 583.015531] env[69982]: result = fn(*args, **kwargs)
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 583.015531] env[69982]: return f(*args, **kwargs)
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 583.015531] env[69982]: return db.service_get_minimum_version(context, binaries)
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 583.015531] env[69982]: _check_db_access()
[ 583.015531] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 583.015531] env[69982]: stacktrace = ''.join(traceback.format_stack())
[ 583.015531] env[69982]:
[ 583.016371] env[69982]: ERROR nova.db.main.api [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 583.016371] env[69982]: result = function(*args, **kwargs)
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 583.016371] env[69982]: return func(*args, **kwargs)
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 583.016371] env[69982]: result = fn(*args, **kwargs)
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 583.016371] env[69982]: return f(*args, **kwargs)
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 583.016371] env[69982]: return db.service_get_minimum_version(context, binaries)
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 583.016371] env[69982]: _check_db_access()
[ 583.016371] env[69982]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 583.016371] env[69982]: stacktrace = ''.join(traceback.format_stack())
[ 583.016371] env[69982]:
[ 583.016754] env[69982]: WARNING nova.objects.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Failed to get minimum service version for cell 5b8ab85c-1108-41ed-a555-352a3350d6fd
[ 583.016888] env[69982]: WARNING nova.objects.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 583.017340] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Acquiring lock "singleton_lock" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 583.017531] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Acquired lock "singleton_lock" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
583.017783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Releasing lock "singleton_lock" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.018123] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Full set of CONF: {{(pid=69982) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 583.018273] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ******************************************************************************** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 583.018402] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] Configuration options gathered from: {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 583.018570] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 583.018767] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 583.018897] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ================================================================================ {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 583.019122] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] allow_resize_to_same_host = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.019396] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] arq_binding_timeout = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.019457] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] backdoor_port = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.019547] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] backdoor_socket = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.019710] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] block_device_allocate_retries = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.019872] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] block_device_allocate_retries_interval = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020061] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cert = self.pem {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020237] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020407] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute_monitors = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020581] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] config_dir = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020750] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] config_drive_format = iso9660 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.020884] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021060] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] config_source = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021232] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] console_host = devstack {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021400] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] control_exchange = nova {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021559] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cpu_allocation_ratio = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021721] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] daemon = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.021888] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] debug = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022059] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_access_ip_network_name = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022246] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_availability_zone = nova {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022386] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_ephemeral_format = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022546] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_green_pool_size = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022790] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.022956] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] default_schedule_zone = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023128] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] disk_allocation_ratio = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023291] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] enable_new_services = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023469] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] enabled_apis = ['osapi_compute'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023635] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] enabled_ssl_apis = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023793] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] flat_injected = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.023952] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] force_config_drive = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.024119] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] force_raw_images = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.024287] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] graceful_shutdown_timeout = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.024447] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] heal_instance_info_cache_interval = -1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.024672] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] host = cpu-1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.024856] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025030] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025196] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025423] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025617] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_build_timeout = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025785] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_delete_interval = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.025954] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_format = [instance: %(uuid)s] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026136] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_name_template = instance-%08x {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026298] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_usage_audit = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026466] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_usage_audit_period = month {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026629] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026793] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.026956] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] internal_service_availability_zone = internal {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027126] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] key = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027284] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] live_migration_retry_count = 30 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027506] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_color = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027653] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_config_append = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027827] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.027989] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_dir = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028159] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028288] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_options = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028469] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_rotate_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028648] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_rotate_interval_type = days {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028816] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] log_rotation_type = none {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.028944] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029085] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029259] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029424] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029552] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029715] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] long_rpc_timeout = 1800 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.029872] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_concurrent_builds = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030039] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_concurrent_live_migrations = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030202] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_concurrent_snapshots = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030381] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_local_block_devices = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030519] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_logfile_count = 30 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030675] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] max_logfile_size_mb = 200 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030831] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] maximum_instance_delete_attempts = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.030996] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metadata_listen = 0.0.0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.031175] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metadata_listen_port = 8775 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.031343] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metadata_workers = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.031503] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] migrate_max_retries = -1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.031669] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] mkisofs_cmd = genisoimage {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.031872] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032022] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] my_ip = 10.180.1.21 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032225] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032390] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] network_allocate_retries = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032566] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032735] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.032896] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] osapi_compute_listen_port = 8774 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033075] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] osapi_compute_unique_server_name_scope = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033248] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] osapi_compute_workers = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033423] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] password_length = 12 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033579] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] periodic_enable = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033739] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] periodic_fuzzy_delay = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.033909] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] pointer_model = usbtablet {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034088] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] preallocate_images = none {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034252] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] publish_errors = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034380] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] pybasedir = /opt/stack/nova {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034544] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ram_allocation_ratio = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034699] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rate_limit_burst = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.034865] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rate_limit_except_level = CRITICAL {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035032] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rate_limit_interval = 0 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035192] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reboot_timeout = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035348] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reclaim_instance_interval = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035566] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] record = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035769] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reimage_timeout_per_gb = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.035941] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] report_interval = 120 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036117] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rescue_timeout = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036280] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reserved_host_cpus = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036442] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reserved_host_disk_mb = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036595] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reserved_host_memory_mb = 512 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036756] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] reserved_huge_pages = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.036915] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] resize_confirm_window = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037102] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] resize_fs_using_block_device = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037267] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] resume_guests_state_on_host_boot = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037470] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037625] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] rpc_response_timeout = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037790] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] run_external_periodic_tasks = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.037959] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] running_deleted_instance_action = reap {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038132] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038292] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] running_deleted_instance_timeout = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038465] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler_instance_sync_interval = 120 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038656] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_down_time = 720 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038826] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] servicegroup_driver = db {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.038981] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] shell_completion = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.039156] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] shelved_offload_time = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.039315] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] shelved_poll_interval = 3600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.039481] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] shutdown_timeout = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.039643] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] source_is_ipv6 = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.039800] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ssl_only = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040088] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040263] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] sync_power_state_interval = 600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040428] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] sync_power_state_pool_size = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040598] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] syslog_log_facility = LOG_USER {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040755] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] tempdir = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.040912] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] timeout_nbd = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041096] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] transport_url = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041261] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] update_resources_interval = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041421] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] use_cow_images = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041589] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] use_journal = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041754] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] use_json = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.041911] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] use_rootwrap_daemon = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042078] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] 
use_stderr = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042240] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] use_syslog = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042395] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vcpu_pin_set = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042553] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plugging_is_fatal = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042721] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plugging_timeout = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.042885] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] virt_mkfs = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.043054] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] volume_usage_poll_interval = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.043217] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] watch_log_file = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.043386] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] web = /usr/share/spice-html5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 583.043572] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.043740] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.043900] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.044080] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_concurrency.disable_process_locking = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.044710] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.044907] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.045095] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.045277] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.045476] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.045667] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.045859] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.auth_strategy = keystone {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046040] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.compute_link_prefix = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046223] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046395] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.dhcp_domain = novalocal {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046564] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.enable_instance_password = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046730] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.glance_link_prefix = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.046896] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047081] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047249] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.instance_list_per_project_cells = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047410] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.list_records_by_skipping_down_cells = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047607] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.local_metadata_per_cell = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047784] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.max_limit = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.047951] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.metadata_cache_expiration = 15 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.048144] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.neutron_default_tenant_id = default {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.048320] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.response_validation = warn {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.048511] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.use_neutron_default_nets = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.048702] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.048866] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049044] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049223] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049394] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_dynamic_targets = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049559] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_jsonfile_path = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049742] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.049940] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.backend = dogpile.cache.memcached {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050125] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.backend_argument = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050289] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.backend_expiration_time = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050460] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.config_prefix = cache.oslo {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050630] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.dead_timeout = 60.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050794] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.debug_cache_backend = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.050956] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.enable_retry_client = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051131] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.enable_socket_keepalive = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051302] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.enabled = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051465] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.enforce_fips_mode = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051629] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.expiration_time = 600 
{{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051789] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.hashclient_retry_attempts = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.051952] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052126] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_dead_retry = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052287] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_password = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052448] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052612] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052774] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_pool_maxsize = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.052935] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053108] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_sasl_enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053286] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053453] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053613] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.memcache_username = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053775] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.proxies = [] {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.053936] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_db = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054110] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_password = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054283] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054459] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054629] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_server = localhost:6379 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054793] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_socket_timeout = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.054951] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.redis_username = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055127] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.retry_attempts = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055293] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.retry_delay = 0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055472] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.socket_keepalive_count = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055651] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.socket_keepalive_idle = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055818] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.socket_keepalive_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.055978] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.tls_allowed_ciphers = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056153] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.tls_cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056311] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.tls_certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056472] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.tls_enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056630] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cache.tls_keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056799] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.056972] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.auth_type = password {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.057147] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.057322] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.057503] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.057678] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.057841] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.cross_az_attach = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058008] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.debug = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058177] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.endpoint_template = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058340] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.http_retries = 3 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058522] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058690] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.058864] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.os_region_name = RegionOne {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059038] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059200] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cinder.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059370] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059531] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.cpu_dedicated_set = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059687] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.cpu_shared_set = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.059852] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.image_type_exclude_list = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060018] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060186] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060347] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060507] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060679] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.060842] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.resource_provider_association_refresh = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061007] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061179] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.shutdown_retry_interval = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061358] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061537] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] conductor.workers = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061716] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] console.allowed_origins = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.061874] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] console.ssl_ciphers = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062054] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] console.ssl_minimum_version = default {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062224] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] consoleauth.enforce_session_timeout = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062393] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] consoleauth.token_ttl = 600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062559] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062720] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.certfile = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.062881] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063047] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063207] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063363] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063522] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063681] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063836] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.063990] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064162] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064318] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064472] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064639] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.service_type = accelerator {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064799] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.064955] env[69982]: DEBUG oslo_service.backend.eventlet.service 
[None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065128] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065286] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065486] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065662] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] cyborg.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065836] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.asyncio_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.065997] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.asyncio_slave_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.066184] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.backend = sqlalchemy {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.066355] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.066520] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.connection_debug = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.066689] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.connection_parameters = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.066852] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.connection_recycle_time = 3600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067025] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.connection_trace = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067193] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.db_inc_retry_interval = 
True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067356] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.db_max_retries = 20 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067566] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.db_max_retry_interval = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067748] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.db_retry_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.067916] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.max_overflow = 50 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068092] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.max_pool_size = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068257] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.max_retries = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068437] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068626] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.mysql_wsrep_sync_wait = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068789] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.pool_timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.068952] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.retry_interval = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.069128] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.slave_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.069290] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.sqlite_synchronous = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.069458] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] database.use_db_reconnect = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
583.069619] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.asyncio_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.069776] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.asyncio_slave_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.069944] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.backend = sqlalchemy {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070125] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070292] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.connection_debug = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070459] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.connection_parameters = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070651] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.connection_recycle_time = 3600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070821] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.connection_trace = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.070982] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.db_inc_retry_interval = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071161] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.db_max_retries = 20 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071324] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.db_max_retry_interval = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071485] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.db_retry_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071648] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.max_overflow = 50 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071810] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.max_pool_size = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.071970] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.max_retries = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072158] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072319] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072479] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.pool_timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072642] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.retry_interval = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072802] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.slave_connection = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.072963] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] api_database.sqlite_synchronous = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.073151] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] devices.enabled_mdev_types = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.073331] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.073502] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.073690] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ephemeral_storage_encryption.enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.073864] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074048] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.api_servers = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074217] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074379] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074542] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074700] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.074859] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075054] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.debug = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075194] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.default_trusted_certificate_ids = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075358] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.enable_certificate_validation = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075542] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.enable_rbd_download = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075712] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.075881] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076056] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076221] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.max_version = None {{(pid=69982) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076379] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076540] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.num_retries = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076742] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.rbd_ceph_conf = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.076912] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.rbd_connect_timeout = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077097] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.rbd_pool = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077269] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.rbd_user = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077461] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077624] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077790] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.077969] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.service_type = image {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078158] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078320] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078478] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078639] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078821] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.078986] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.verify_glance_signatures = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.079162] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] glance.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.079333] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] guestfs.debug = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.079503] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.079691] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.auth_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.079872] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080048] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080220] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080382] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080543] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080703] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.080867] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.insecure = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081037] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081202] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081362] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081522] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081680] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.081838] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082014] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.service_type = shared-file-system {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082187] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.share_apply_policy_timeout = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082351] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082507] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082683] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.082896] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.083102] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.083294] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] manila.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.083475] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] mks.enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.083834] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084036] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.manager_interval = 2400 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084214] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.precache_concurrency = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084387] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.remove_unused_base_images = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084558] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084728] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.084906] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] image_cache.subdirectory_name = _base {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.085095] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.api_max_retries = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.085264] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.api_retry_interval = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.085428] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.085612] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.auth_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.085803] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086042] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086239] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086414] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.conductor_group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086578] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086743] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.086937] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.087137] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.087326] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.087528] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.087708] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.087879] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.peer_list = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088056] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088219] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.retriable_status_codes = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088383] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.serial_console_state_timeout = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088545] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088719] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.service_type = baremetal {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.088880] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.shard = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089057] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089221] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089379] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089538] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089721] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.089882] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ironic.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090077] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090258] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] key_manager.fixed_key = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090442] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090604] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.barbican_api_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090763] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.barbican_endpoint = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.090935] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.barbican_endpoint_type = public {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091109] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.barbican_region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091272] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091433] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091598] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091760] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.091917] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092094] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.number_of_retries = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092258] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.retry_delay = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092420] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.send_service_user_token = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092579] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092739] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.092898] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.verify_ssl = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093065] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican.verify_ssl_path = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093235] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093398] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.auth_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093557] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093713] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.093876] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094047] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094209] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094370] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094528] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] barbican_service_user.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094696] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.approle_role_id = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.094854] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.approle_secret_id = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095035] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.kv_mountpoint = secret {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095202] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.kv_path = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095366] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.kv_version = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095548] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.namespace = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095719] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.root_token_id = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.095881] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.ssl_ca_crt_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096064] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.timeout = 60.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096233] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.use_ssl = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096405] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096585] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096748] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.096909] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097080] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.connect_retries = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097240] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097397] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097591] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097750] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.097908] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098079] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098251] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098444] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098583] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098757] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.service_type = identity {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.098921] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099092] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099255] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099418] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099599] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099766] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] keystone.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.099956] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.ceph_mount_options = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.100281] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.100474] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.connection_uri = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.100638] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_mode = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.100806] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.100972] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_models = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.101160] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_power_governor_high = performance {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.101329] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.101493] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_power_management = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.101667] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.101836] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.device_detach_attempts = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102000] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.device_detach_timeout = 20 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102182] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.disk_cachemodes = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102343] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.disk_prefix = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102507] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.enabled_perf_events = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102668] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.file_backed_memory = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102832] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.gid_maps = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.102994] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.hw_disk_discard = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103166] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.hw_machine_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103335] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_rbd_ceph_conf = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103499] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103662] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103828] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_rbd_glance_store_name = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.103997] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_rbd_pool = rbd 
{{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104177] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_type = default {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104337] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.images_volume_group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104499] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.inject_key = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104664] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.inject_partition = -2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104823] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.inject_password = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.104984] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.iscsi_iface = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105159] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.iser_use_multipath = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105325] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105503] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105676] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_downtime = 500 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105838] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.105995] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106167] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_inbound_addr = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106327] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106485] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106642] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_scheme = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106811] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_timeout_action = abort {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.106970] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_tunnelled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.107139] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_uri = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.107301] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.live_migration_with_native_tls = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.107474] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.max_queues = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.107633] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.107877] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.108055] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.nfs_mount_options = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.108352] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.108544] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69982) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.108705] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.108870] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.109045] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.109210] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_pcie_ports = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.109377] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.109543] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.pmem_namespaces = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.109704] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.quobyte_client_cfg = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110023] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110208] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110383] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110549] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110717] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rbd_secret_uuid = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.110873] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rbd_user = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111049] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111226] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111387] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rescue_image_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111546] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rescue_kernel_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111705] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rescue_ramdisk_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.111872] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.112044] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.rx_queue_size = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.112218] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.smbfs_mount_options = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.112518] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.112696] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.snapshot_compression = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.112861] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.snapshot_image_format = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113109] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113283] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.sparse_logical_volumes = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113448] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.swtpm_enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113624] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.swtpm_group = tss {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113795] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.swtpm_user = tss {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.113967] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.sysinfo_serial = unique {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114142] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.tb_cache_size = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114304] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.tx_queue_size = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114470] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.uid_maps = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114638] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.use_virtio_for_bridges = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114811] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.virt_type = kvm {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.114981] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.volume_clear = zero {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.115160] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.volume_clear_size = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.115328] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.volume_enforce_multipath = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.115512] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.volume_use_multipath = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.115688] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_cache_path = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.115863] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.116045] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_mount_group = qemu {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.116220] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.116391] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.116678] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.116861] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.vzstorage_mount_user = stack {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117040] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117221] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117401] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.auth_type = password {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117589] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117758] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.117924] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118099] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118264] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118448] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.default_floating_pool = public {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118626] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118791] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.extension_sync_interval = 600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.118955] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.http_retries = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119136] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119298] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119458] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119631] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119793] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.119962] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.ovs_bridge = br-int {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120138] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.physnets = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120309] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.region_name = RegionOne 
{{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120467] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120636] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.service_metadata_proxy = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120795] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.120962] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.service_type = network {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121139] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121297] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121454] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121612] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121794] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.121953] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] neutron.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.122148] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.bdms_in_notifications = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.122333] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.default_level = INFO {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.122505] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.include_share_mapping = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.122680] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.notification_format = unversioned {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.122843] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.notify_on_state_change = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123031] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123219] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] pci.alias = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123391] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] pci.device_spec = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123559] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] pci.report_in_placement = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123732] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.123904] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.auth_type = password {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124088] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124253] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124411] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124574] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124735] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.connect_retries = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.124894] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125064] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.default_domain_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125229] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.default_domain_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125390] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.domain_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125581] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.domain_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125752] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.endpoint_override = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.125920] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126097] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126260] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126417] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126587] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.password = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126749] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.project_domain_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.126915] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.project_domain_name = Default {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127094] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.project_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127270] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.project_name = service {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127464] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.region_name = RegionOne {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127636] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127804] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.127974] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.service_type = placement {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.128154] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.128315] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.status_code_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.128503] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.128683] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.system_scope = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.128845] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129010] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.trust_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129179] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.user_domain_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129348] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] 
placement.user_domain_name = Default {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129511] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.user_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129683] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.username = nova {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.129865] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130035] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] placement.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130225] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.cores = 20 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130385] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.count_usage_from_placement = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130558] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130729] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.injected_file_content_bytes = 10240 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.130894] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.injected_file_path_length = 255 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131073] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.injected_files = 5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131244] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.instances = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131408] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.key_pairs = 100 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131573] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.metadata_items = 128 {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131739] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.ram = 51200 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.131901] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.recheck_quota = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132080] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.server_group_members = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132252] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.server_groups = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132460] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132635] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] quota.unified_limits_resource_strategy = require {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132807] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.132969] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133146] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.image_metadata_prefilter = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133310] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133472] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.max_attempts = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133633] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.max_placement_results = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133791] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.133947] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134119] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134293] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] scheduler.workers = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134464] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134637] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134825] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.134999] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.135180] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.135349] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.135535] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.135738] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.135910] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136090] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136254] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136416] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136579] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136756] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.136921] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.isolated_hosts = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137099] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.isolated_images = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137270] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137435] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137630] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137802] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.pci_in_placement = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.137967] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.138146] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.138311] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.138502] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.138769] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.138974] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.139165] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.track_instance_changes = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.139350] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.139536] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metrics.required = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.139686] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metrics.weight_multiplier = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.139853] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.140086] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] metrics.weight_setting = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.140429] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.140611] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.enabled = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.140828] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.port_range = 10000:20000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141044] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141229] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141401] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] serial_console.serialproxy_port = 6083 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141598] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141786] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.auth_type = password {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.141952] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.142137] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.142357] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.142536] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.142702] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.142878] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.send_service_user_token = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.143058] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.split_loggers = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.143223] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] service_user.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.143397] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.agent_enabled = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.143561] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.143893] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.144205] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.144449] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.html5proxy_port = 6082 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.144636] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.image_compression = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.144801] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.jpeg_compression = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.144965] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.playback_compression = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.145149] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.require_secure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.145325] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.server_listen = 127.0.0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.145521] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.145839] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146031] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.streaming_mode = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146202] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] spice.zlib_compression = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146374] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] upgrade_levels.baseapi = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146552] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] upgrade_levels.compute = auto {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146718] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] upgrade_levels.conductor = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.146879] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] upgrade_levels.scheduler = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147061] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147230] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147391] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147586] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147763] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.147928] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148104] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148271] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148439] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vendordata_dynamic_auth.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148647] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.api_retry_count = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148815] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.ca_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.148992] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.149182] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.cluster_name = testcl1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.149355] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.connection_pool_size = 10 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.149516] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.console_delay_seconds = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.149690] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.datastore_regex = ^datastore.* {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.149946] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150150] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.host_password = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150323] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.host_port = 443 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150495] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.host_username = administrator@vsphere.local {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150667] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.insecure = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150830] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.integration_bridge = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.150995] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.maximum_objects = 100 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.151210] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.pbm_default_policy = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.151448] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.pbm_enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.151635] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.pbm_wsdl_location = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.151809] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.151971] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.serial_port_proxy_uri = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.152191] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.serial_port_service_uri = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.152372] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.task_poll_interval = 0.5 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.152547] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.use_linked_clone = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.152719] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.vnc_keymap = en-us {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.152885] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.vnc_port = 5900 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.153061] env[69982]: DEBUG 
oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vmware.vnc_port_total = 10000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.153256] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.auth_schemes = ['none'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.153433] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.153751] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.153938] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.154124] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.novncproxy_port = 6080 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.154320] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.server_listen = 127.0.0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.154500] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.154666] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.vencrypt_ca_certs = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.154825] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.vencrypt_client_cert = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155047] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vnc.vencrypt_client_key = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155269] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155453] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_deep_image_inspection = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155642] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155809] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.155970] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156150] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.disable_rootwrap = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156314] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.enable_numa_live_migration = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156475] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156640] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156800] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.156960] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.libvirt_disable_apic = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157136] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157302] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157492] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157666] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157831] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.157993] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.158168] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.158328] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.158513] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.158716] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159110] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159110] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.client_socket_timeout = 900 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159268] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.default_pool_size = 1000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159423] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.keep_alive = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159598] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.max_header_line = 16384 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159760] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.159944] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.ssl_ca_file = None 
{{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160125] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.ssl_cert_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160290] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.ssl_key_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160459] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.tcp_keepidle = 600 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160649] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160821] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] zvm.ca_file = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.160982] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] zvm.cloud_connector_url = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.161317] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.161494] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] zvm.reachable_timeout = 300 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.161669] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.161848] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162036] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.connection_string = messaging:// {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162207] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.enabled = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162378] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] 
profiler.es_doc_type = notification {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162547] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.es_scroll_size = 10000 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162716] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.es_scroll_time = 2m {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.162878] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.filter_error_trace = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163056] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.hmac_keys = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163228] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.sentinel_service_name = mymaster {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163393] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.socket_timeout = 0.1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163554] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.trace_requests = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163716] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler.trace_sqlalchemy = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.163894] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler_jaeger.process_tags = {} {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164067] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler_jaeger.service_name_prefix = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164221] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] profiler_otlp.service_name_prefix = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164394] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] remote_debug.host = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164555] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] remote_debug.port = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164782] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.164959] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.165139] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.165305] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.165485] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.165712] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.165912] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.166094] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.166263] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.166439] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.166601] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.166845] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167066] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167256] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167484] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167644] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167811] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.167976] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.168166] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.168330] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.168513] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.168704] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.168876] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169049] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169217] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69982) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169380] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169542] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169704] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.169899] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170086] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170257] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170433] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170603] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170767] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.170934] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171154] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.ssl_version = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171286] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171479] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171650] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_notifications.retry = -1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171829] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.171998] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172189] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.auth_section = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172353] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.auth_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172513] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.cafile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172671] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.certfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172832] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.collect_timing = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.172990] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.connect_retries = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.173163] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.connect_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.173321] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_id = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.173492] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.173683] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_override = 
None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.173878] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.174104] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.174312] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.endpoint_service_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.174495] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.insecure = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.174657] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.keyfile = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.174835] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.max_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175037] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.min_version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175235] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.region_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175409] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.retriable_status_codes = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175594] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.service_name = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175757] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.service_type = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.175919] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.split_loggers = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176093] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.status_code_retries = None {{(pid=69982) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176256] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.status_code_retry_delay = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176413] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.timeout = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176570] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.valid_interfaces = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176727] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_limit.version = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.176890] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_reports.file_event_handler = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177067] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177231] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] oslo_reports.log_dir = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177402] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177591] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177761] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.177934] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178123] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178295] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178465] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178626] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178782] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.178947] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.179123] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.179284] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] vif_plug_ovs_privileged.user = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.179455] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.179649] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.179829] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180016] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180191] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180367] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180538] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180705] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.180886] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181069] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.isolate_vif = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181240] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181408] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181578] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181751] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.181913] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] os_vif_ovs.per_port_bridge = False {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182097] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.capabilities = [21] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182264] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182422] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.helper_command = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182589] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182750] env[69982]: DEBUG oslo_service.backend.eventlet.service [None 
req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.182909] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] privsep_osbrick.user = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183092] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183254] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.group = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183412] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.helper_command = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183578] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183744] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.183900] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] nova_sys_admin.user = None {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 583.184046] env[69982]: DEBUG oslo_service.backend.eventlet.service [None req-3d9e1fba-4a76-4f9a-a03c-31d53fa38561 None None] ******************************************************************************** {{(pid=69982) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 583.184494] env[69982]: INFO nova.service [-] Starting compute node (version 31.0.1) [ 583.688916] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Getting list of instances from cluster (obj){ [ 583.688916] env[69982]: value = "domain-c8" [ 583.688916] env[69982]: _type = "ClusterComputeResource" [ 583.688916] env[69982]: } {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 583.690016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddce609a-438f-47b7-bc36-90b35b980a81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.698956] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Got total of 0 instances {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 583.699567] env[69982]: WARNING nova.virt.vmwareapi.driver [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 583.700062] env[69982]: INFO nova.virt.node [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Generated node identity 206a5498-2e79-46c1-a636-9488a05fb67d [ 583.700345] env[69982]: INFO nova.virt.node [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Wrote node identity 206a5498-2e79-46c1-a636-9488a05fb67d to /opt/stack/data/n-cpu-1/compute_id [ 584.203319] env[69982]: WARNING nova.compute.manager [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Compute nodes ['206a5498-2e79-46c1-a636-9488a05fb67d'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 585.209266] env[69982]: INFO nova.compute.manager [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 586.215431] env[69982]: WARNING nova.compute.manager [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 586.215783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.216308] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.216472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.216628] env[69982]: DEBUG nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 586.217602] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734e0815-da61-4934-8e97-e30f03635416 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.226147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d50e972-60f3-4d5f-9cde-053f169158f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.242615] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff80a4d7-7f17-4902-b350-6d212b60294c {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.250564] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf438cb9-740b-4359-a840-3d98faaa773e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.280339] env[69982]: DEBUG nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180847MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 586.280493] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.280717] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.783274] env[69982]: WARNING nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] No compute node record for cpu-1:206a5498-2e79-46c1-a636-9488a05fb67d: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 206a5498-2e79-46c1-a636-9488a05fb67d could not be found. [ 587.288426] env[69982]: INFO nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 206a5498-2e79-46c1-a636-9488a05fb67d [ 588.797514] env[69982]: DEBUG nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 588.797993] env[69982]: DEBUG nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 588.965197] env[69982]: INFO nova.scheduler.client.report [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] [req-521d31b9-8b1d-4ef4-831a-8cb0a884fb53] Created resource provider record via placement API for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
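The inventory payload logged below for provider 206a5498-2e79-46c1-a636-9488a05fb67d follows the placement data model: one entry per resource class (VCPU, MEMORY_MB, DISK_GB) with total, reserved, min_unit, max_unit, step_size and allocation_ratio fields. A minimal sketch of how usable capacity can be derived from such an entry, assuming placement's documented formula (total - reserved) * allocation_ratio; this is illustrative plain Python, not Nova or placement code, and the usable_capacity helper is hypothetical:

# Illustrative only: the inventory dict as logged by the resource tracker, and the
# usable capacity placement would expose per resource class, assuming the
# formula (total - reserved) * allocation_ratio.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46,
                'step_size': 1, 'allocation_ratio': 1.0},
}

def usable_capacity(entry):
    # Usable units for one resource class: (total - reserved) * allocation_ratio.
    return int((entry['total'] - entry['reserved']) * entry['allocation_ratio'])

for rc, entry in inventory.items():
    print(f"{rc}: {usable_capacity(entry)} usable units, "
          f"at most {entry['max_unit']} per single allocation")

For the values above this yields 192 VCPU (48 * 4.0), 196078 MEMORY_MB and 400 DISK_GB of allocatable capacity, consistent with the 48 total vcpus and 196590MB phys_ram with 512MB reserved in the final resource view above and the allocation ratios in the inventory records that follow.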
[ 588.983536] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d07ee9-346f-445d-80b0-1315c263e6cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.992092] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67e5098-cdf2-46e3-a96b-c315650a8053 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.022509] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66ee209-52b4-4d37-a864-e4cb15972df3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.030933] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5158edf0-db76-4da0-972e-26f9419b6d2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.045052] env[69982]: DEBUG nova.compute.provider_tree [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 589.588973] env[69982]: DEBUG nova.scheduler.client.report [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 589.589393] env[69982]: DEBUG nova.compute.provider_tree [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 0 to 1 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 589.589748] env[69982]: DEBUG nova.compute.provider_tree [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 589.644172] env[69982]: DEBUG nova.compute.provider_tree [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Updating resource 
provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 1 to 2 during operation: update_traits {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 590.148998] env[69982]: DEBUG nova.compute.resource_tracker [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 590.149399] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.868s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.149399] env[69982]: DEBUG nova.service [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Creating RPC server for service compute {{(pid=69982) start /opt/stack/nova/nova/service.py:186}} [ 590.170610] env[69982]: DEBUG nova.service [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] Join ServiceGroup membership for this service compute {{(pid=69982) start /opt/stack/nova/nova/service.py:203}} [ 590.170858] env[69982]: DEBUG nova.servicegroup.drivers.db [None req-ff9f0204-05a1-49ee-8010-7a8fadfee9c3 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69982) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 593.172330] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.675777] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Getting list of instances from cluster (obj){ [ 593.675777] env[69982]: value = "domain-c8" [ 593.675777] env[69982]: _type = "ClusterComputeResource" [ 593.675777] env[69982]: } {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 593.677013] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488b869b-8199-4d3c-ba60-5fd8c77ea275 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.685889] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Got total of 0 instances {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 593.686148] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.686459] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Getting list of instances from cluster (obj){ [ 593.686459] env[69982]: value = "domain-c8" [ 593.686459] env[69982]: _type = "ClusterComputeResource" [ 593.686459] env[69982]: } {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 593.687359] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d0feb5-7c56-4cd4-8939-94ebca060960 {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.695482] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Got total of 0 instances {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 630.170214] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.170621] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.678284] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.235263] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.235946] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.238415] env[69982]: INFO nova.compute.claims [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.941911] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "d0114728-9d44-4700-86a9-175e5f840b1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.942210] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "d0114728-9d44-4700-86a9-175e5f840b1d" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.304970] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd4dbc5-d0ec-48f2-a714-507c7ae566dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.313600] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b899882a-1026-44b4-8abb-48658a2950b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.344217] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2240c579-f2da-4a3a-aaf7-5d7a61276c95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.352938] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483a4777-7f0f-4794-8ada-0c84ff6cca0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.370577] env[69982]: DEBUG nova.compute.provider_tree [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.445874] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 632.874213] env[69982]: DEBUG nova.scheduler.client.report [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.979341] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.379438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.380010] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 633.383956] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.405s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.386790] env[69982]: INFO nova.compute.claims [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.893257] env[69982]: DEBUG nova.compute.utils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.899736] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Not allocating networking since 'none' was specified. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 634.152488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.152722] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.400981] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 634.503968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bffff0-1ecd-4002-a9f2-7e6a068d5fc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.515511] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d9def79-391c-4636-b73c-377ec5e941ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.549065] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ed7084-3e3a-4b33-952f-f0ab761e055d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.557778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d4782b-56e7-4929-bdf5-70c746377948 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.573497] env[69982]: DEBUG nova.compute.provider_tree [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.656821] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.822287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.822594] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.077798] env[69982]: DEBUG nova.scheduler.client.report [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.188059] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.328412] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 635.413329] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 635.580933] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 635.581222] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.581385] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 635.581568] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.584027] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 635.584027] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 635.584027] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 635.584027] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
635.584027] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 635.584261] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 635.584261] env[69982]: DEBUG nova.virt.hardware [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 635.584331] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.201s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.584791] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Start building networks asynchronously for instance. 
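The nova.virt.hardware entries walk through CPU topology selection: with no flavor or image limits the maxima default to 65536, and the only factorization of 1 vCPU is 1 socket x 1 core x 1 thread. A self-contained sketch of that enumeration under given limits; this is not Nova's implementation, which additionally applies preference ordering:

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        """Yield every (sockets, cores, threads) whose product equals vcpus."""
        for s in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % s:
                continue
            for c in range(1, min(vcpus // s, max_cores) + 1):
                if (vcpus // s) % c:
                    continue
                t = vcpus // (s * c)
                if t <= max_threads:
                    yield VirtCPUTopology(s, c, t)

    print(list(possible_topologies(1)))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]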
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.588684] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b97f7e-c2c9-42b2-b3a4-937d9382fd07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.593045] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.405s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.595472] env[69982]: INFO nova.compute.claims [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.604719] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5e04e9-3d39-4af8-a761-fbc1ef6fb987 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.628914] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8363291a-4513-4531-a628-f2fe5d3f45af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.649284] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.660489] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 635.661633] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fdd7d74-ceb0-4eb3-840e-416146966fa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.676209] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Created folder: OpenStack in parent group-v4. [ 635.676437] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating folder: Project (d9c4c1e2c4f94eb38f354a280ff1d862). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 635.676824] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64930622-cd35-4231-b71a-0c4286031fe5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.687141] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Created folder: Project (d9c4c1e2c4f94eb38f354a280ff1d862) in parent group-v767796. [ 635.687324] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating folder: Instances. Parent ref: group-v767797. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 635.687555] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a89de133-e01d-43a4-b988-e3ca3cda7bed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.701022] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Created folder: Instances in parent group-v767797. [ 635.701022] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 635.701022] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 635.701022] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe17f85-29f0-4959-9be9-a5136c147ee5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.716972] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.716972] env[69982]: value = "task-3863987" [ 635.716972] env[69982]: _type = "Task" [ 635.716972] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.725803] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3863987, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.854518] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.097161] env[69982]: DEBUG nova.compute.utils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 636.099039] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.099039] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.187380] env[69982]: DEBUG nova.policy [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b448d520c5de477f8f42581cb5098917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f77a1e221ed742f9b4ffae2507db7d83', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.232279] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3863987, 'name': CreateVM_Task, 'duration_secs': 0.417175} completed successfully. 
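The CreateVM_Task entries show the driver submitting a vCenter task and polling it until completion, logging progress along the way and recording duration_secs at the end; oslo.vmware exposes this through its session's wait_for_task(). The loop below is only a conceptual stand-in with a fake task object, not the library's code:

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle (illustrative only)."""
        def __init__(self, steps):
            self._steps = iter(steps)

        def poll(self):
            return next(self._steps)

    def wait_for_task(task, interval=0.5):
        """Poll until the task reports success or error."""
        start = time.time()
        while True:
            state, progress = task.poll()
            print('progress is %d%%' % progress)
            if state == 'success':
                return time.time() - start  # the duration_secs seen in the log
            if state == 'error':
                raise RuntimeError('task failed')
            time.sleep(interval)

    wait_for_task(FakeTask([('running', 0), ('running', 50), ('success', 100)]),
                  interval=0)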
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.232541] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 636.233812] env[69982]: DEBUG oslo_vmware.service [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf688ba4-4122-4ff1-9cb0-7b9aa689b23f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.242091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.242326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.243214] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 636.243570] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1028447a-b3cf-4ee0-9c8a-c685c6604e0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.250130] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 636.250130] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526ea4bf-472a-0bc3-2384-dfb449c1e5de" [ 636.250130] env[69982]: _type = "Task" [ 636.250130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.259433] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526ea4bf-472a-0bc3-2384-dfb449c1e5de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.615501] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.736827] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6363dc-f637-4c4d-8379-d058429bd707 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.747993] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df79864e-197e-4ba4-9526-f81106b21166 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.795812] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63192424-463d-43bd-8c22-126f3e673693 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.808305] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75599020-58ce-4079-b3bd-948fb2c65e20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.813285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.813466] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 636.813691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.813830] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.814263] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 636.814968] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6f7bdbb-665d-44ec-89e2-176aab816b26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.826894] env[69982]: DEBUG nova.compute.provider_tree 
[None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.835606] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 636.835787] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 636.836681] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fcc515-2bfb-44f8-bdc2-04c882c9582e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.845044] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41607332-481f-4b84-af03-a6027d15a8c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.857956] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 636.857956] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52df0065-10c9-727b-e8c7-f2866b9d96ea" [ 636.857956] env[69982]: _type = "Task" [ 636.857956] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.868278] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52df0065-10c9-727b-e8c7-f2866b9d96ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.193491] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Successfully created port: b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.330397] env[69982]: DEBUG nova.scheduler.client.report [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.377695] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 637.377695] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating directory with path [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.378293] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29715fa0-b9cf-49a6-b186-2c7ece1cc426 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.414951] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Created directory with path [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.415189] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Fetch image to [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 637.415387] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Downloading image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to [datastore1] 
vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk on the data store datastore1 {{(pid=69982) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 637.416221] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664fe018-26c0-4455-9059-d58c257dedfe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.425394] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92b1e53-057d-4e5b-9e6c-0ba08a48e0a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.441632] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f90e6ca-4c1d-415f-bdba-c644a7cb5bc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.486237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3402cff0-a24e-4592-8184-c4e1f30fd1fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.494548] env[69982]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0746a135-5ef1-437d-a300-58015ab32d80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.602812] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Downloading image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to the data store datastore1 {{(pid=69982) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 637.643743] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Start spawning the instance on the hypervisor. 
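The image-cache entries follow a fetch-if-missing pattern: take a lock named after the image under devstack-image-cache_base, check the datastore for the cached VMDK, and only on a miss create a vmware_temp working directory, download tmp-sparse.vmdk, and publish it into the cache. A hedged local-filesystem sketch of that pattern; the paths and the fetch helper are illustrative, and the real code works against the datastore through the vSphere API rather than the local disk:

    import os
    import shutil
    import tempfile
    from oslo_concurrency import lockutils

    IMAGE_ID = 'a4e69d6f-1c15-4f57-92a8-5e81c6be8172'
    CACHE_DIR = '/tmp/devstack-image-cache_base'   # stands in for the datastore folder

    def fetch_image(image_id, destination):
        """Placeholder for the Glance download / HTTP write to the datastore."""
        with open(destination, 'wb') as f:
            f.write(b'vmdk-bytes')

    @lockutils.synchronized(IMAGE_ID)
    def fetch_image_if_missing(image_id):
        cached = os.path.join(CACHE_DIR, image_id, image_id + '.vmdk')
        if os.path.exists(cached):
            return cached                      # cache hit: reuse the cached disk
        tmp_dir = tempfile.mkdtemp(prefix='vmware_temp-')
        tmp_disk = os.path.join(tmp_dir, 'tmp-sparse.vmdk')
        fetch_image(image_id, tmp_disk)        # download once...
        os.makedirs(os.path.dirname(cached), exist_ok=True)
        shutil.move(tmp_disk, cached)          # ...then publish into the cache
        return cached

    print(fetch_image_if_missing(IMAGE_ID))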
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.684030] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.684340] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.684789] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.684789] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.684789] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.684922] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.686429] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.689989] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 637.689989] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.689989] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.689989] env[69982]: DEBUG nova.virt.hardware [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.689989] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d22290a-e607-46ee-b3d6-2dcbb260387a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.704938] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6aa428-8d12-449d-9dd4-2981f114fe51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.751764] env[69982]: DEBUG oslo_vmware.rw_handles [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 637.836405] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.837086] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 637.842490] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.989s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.844208] env[69982]: INFO nova.compute.claims [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.294358] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.294358] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.294897] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.294981] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.295734] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.299032] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.299032] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.299032] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
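The run of "Running periodic task ComputeManager._*" entries comes from oslo.service's periodic task machinery, and _reclaim_queued_deletes shows the common guard of skipping work whose configured interval is not positive. A sketch of the declaration side using the public oslo_service.periodic_task decorator; the spacings and task bodies here are made up:

    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class SketchManager(periodic_task.PeriodicTasks):
        """Illustrative only: how compute-style periodic tasks are declared."""

        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60, run_immediately=True)
        def _poll_unconfirmed_resizes(self, context):
            print('Running periodic task _poll_unconfirmed_resizes')

        @periodic_task.periodic_task(spacing=300, run_immediately=True)
        def _reclaim_queued_deletes(self, context):
            reclaim_instance_interval = 0  # stand-in for CONF.reclaim_instance_interval
            if reclaim_instance_interval <= 0:
                print('CONF.reclaim_instance_interval <= 0, skipping...')
                return

    SketchManager().run_periodic_tasks(context=None)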
{{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 638.299032] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.349647] env[69982]: DEBUG nova.compute.utils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.357138] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.358504] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.369975] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.370586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.499833] env[69982]: DEBUG oslo_vmware.rw_handles [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Completed reading data from the image iterator. {{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 638.500044] env[69982]: DEBUG oslo_vmware.rw_handles [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
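The rw_handles entries show the 21318656-byte image being streamed through an HTTPS write handle to the datastore folder URL, with the handle closed once the image iterator is exhausted. A generic sketch of that chunked copy loop over plain file-like objects; oslo.vmware's handles wrap an HTTP connection, which is not reproduced here:

    import io

    CHUNK_SIZE = 64 * 1024  # copy in fixed-size chunks rather than all at once

    def transfer(read_handle, write_handle, chunk_size=CHUNK_SIZE):
        """Copy chunks until the source is exhausted, then close the writer."""
        copied = 0
        while True:
            chunk = read_handle.read(chunk_size)
            if not chunk:
                break                  # "Completed reading data from the image iterator."
            write_handle.write(chunk)
            copied += len(chunk)
        write_handle.close()           # "Closing write handle for <datastore URL>."
        return copied

    src = io.BytesIO(b'\0' * 21318656)  # same size as the image in the log
    print(transfer(src, io.BytesIO()))  # -> 21318656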
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 638.577239] env[69982]: DEBUG nova.policy [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4ecf3bf94764bbea25d59a4fea2ebda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60aa47f826ce4ba7b14d6937eef58338', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.623756] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Downloaded image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk on the data store datastore1 {{(pid=69982) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 638.627445] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 638.627836] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Copying Virtual Disk [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk to [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 638.628149] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22008c84-94e7-4afb-b2ad-6865d7ef4e48 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.641447] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 638.641447] env[69982]: value = "task-3863988" [ 638.641447] env[69982]: _type = "Task" [ 638.641447] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.653951] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863988, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.805524] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.858497] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 638.875130] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 638.984056] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22b5fe9-44a4-4c23-b829-fbc60c2366a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.993175] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f225895-8158-468b-b810-a931d4f36927 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.030497] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132f6ded-53bb-4c97-b857-7e1e1758f6ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.039655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ab0c97-c15d-418d-afcb-ab3079e84069 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.055509] env[69982]: DEBUG nova.compute.provider_tree [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.154182] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863988, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.405458] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 639.559265] env[69982]: DEBUG nova.scheduler.client.report [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.657017] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769449} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.657017] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Copied Virtual Disk [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk to [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 639.657017] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleting the datastore file [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 639.657017] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4be876da-952d-4a92-8948-a9b0e2da47da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.662969] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 639.662969] env[69982]: value = "task-3863989" [ 639.662969] env[69982]: _type = "Task" [ 639.662969] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.679929] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863989, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.710580] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Successfully created port: 5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.876265] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 639.907745] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 639.907875] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.908044] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 639.908197] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.908339] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 639.908729] env[69982]: DEBUG 
nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 639.908955] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 639.909126] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 639.909363] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 639.909519] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 639.909816] env[69982]: DEBUG nova.virt.hardware [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 639.910898] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81dd03cd-8ba2-4382-85af-a67a73a4a77c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.921440] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fb7b46-03d1-4767-a4e7-fb1b01121694 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.065908] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.067389] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Start building networks asynchronously for instance. 
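The nova.virt.hardware entries above walk through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the maxima default to 65536 sockets/cores/threads, and the only topology that fits one vCPU is 1:1:1. A simplified sketch of that enumeration is below; it mirrors the log's inputs and output but is not Nova's actual algorithm.

    from itertools import product
    from typing import NamedTuple

    class Topology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) combinations whose product equals vcpus."""
        found = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append(Topology(s, c, t))
        return found

    # For the m1.nano flavor in the log: one vCPU, no limits set.
    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]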
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 640.073040] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.266s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.073280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.002s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.073459] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 640.073789] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.669s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.075336] env[69982]: INFO nova.compute.claims [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 640.079159] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27729a5-f2b5-40f5-9e49-cf9dc778ac4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.093025] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef1c34d-9787-451e-9b83-e9a8bcfbb13b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.118420] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b706cc4c-a7d5-4ec7-8cef-d92487a4e129 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.127846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6785d70-a58a-4ec5-b485-989be3a0202b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.183400] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180847MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 640.183565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.196162] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863989, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025576} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.196297] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 640.196510] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Moving file from [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172. {{(pid=69982) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 640.196660] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c51322a1-30c9-45a9-ac66-f957bf62f786 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.204555] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 640.204555] env[69982]: value = "task-3863990" [ 640.204555] env[69982]: _type = "Task" [ 640.204555] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.217418] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863990, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.434650] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Successfully updated port: b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 640.584879] env[69982]: DEBUG nova.compute.utils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 640.588184] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 640.589244] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 640.721262] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863990, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.030084} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.721262] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] File moved {{(pid=69982) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 640.721262] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Cleaning up location [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 640.721262] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleting the datastore file [datastore1] vmware_temp/557938e0-248b-4d96-b488-ef5b105236ab {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 640.721262] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d21b3d1e-37ba-48fa-b4ab-25755a7595a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.730832] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 640.730832] env[69982]: value = "task-3863991" [ 640.730832] env[69982]: _type = "Task" [ 640.730832] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.750195] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863991, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.921526] env[69982]: DEBUG nova.policy [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c8ff9b092e4f40958c53f658165e804e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c90023e59e624e6aa7d3d2363050619f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 640.938267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.938267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.938267] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 641.089220] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 641.223673] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f97c6f0-b06d-4527-97a0-2c7cbf8a8d0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.238572] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc6c601-5f9d-4930-8061-897bb407685f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.250161] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863991, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041949} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.276842] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 641.277921] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a3193ea-d4c5-472c-aa79-0ce81bf42ae1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.281870] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e55b86-fbea-43bd-90c7-4cee607d2b17 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.291213] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 641.291213] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5287f7d4-55c9-92ff-cbf9-a3bd2c63aa92" [ 641.291213] env[69982]: _type = "Task" [ 641.291213] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.304437] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f02c1e-b462-4974-84b0-f4d8e86b9387 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.329171] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5287f7d4-55c9-92ff-cbf9-a3bd2c63aa92, 'name': SearchDatastore_Task, 'duration_secs': 0.013576} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.329812] env[69982]: DEBUG nova.compute.provider_tree [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.331401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.332698] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2/3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.332698] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eedd840b-85ad-4401-b9dd-a33599828d71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.343218] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 641.343218] env[69982]: value = "task-3863992" [ 641.343218] env[69982]: _type = "Task" [ 641.343218] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.352400] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863992, 'name': CopyVirtualDisk_Task} progress is 0%. 
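Taken together, the CopyVirtualDisk / DeleteDatastoreFile / MoveDatastoreFile / SearchDatastore tasks above trace the driver's datastore image-cache handling: the image is staged under vmware_temp, promoted into devstack-image-cache_base, the staging area is cleaned up, and the cached VMDK is then copied into the instance's own directory. The sketch below captures that ordering only; the ds helper object and its methods are assumed stand-ins, not the real ds_util/vm_util API.

    import posixpath

    def fetch_image_if_missing(ds, image_id, temp_dir, cache_dir):
        """Cache-fill order seen in the log; `ds` is an assumed datastore helper."""
        cached = posixpath.join(cache_dir, image_id, f"{image_id}.vmdk")
        if ds.exists(cached):                       # SearchDatastore_Task
            return cached
        staged = posixpath.join(temp_dir, image_id, f"{image_id}.vmdk")
        sparse = posixpath.join(temp_dir, image_id, "tmp-sparse.vmdk")
        ds.copy_virtual_disk(sparse, staged)        # CopyVirtualDisk_Task (sparse -> flat)
        ds.delete(sparse)                           # DeleteDatastoreFile_Task
        ds.move(posixpath.dirname(staged),          # MoveDatastoreFile_Task into the cache
                posixpath.dirname(cached))
        ds.delete(temp_dir)                         # clean up the vmware_temp staging area
        return cached

    def spawn_root_disk(ds, cached_vmdk, instance_uuid):
        """Per-instance copy of the cached image, as in task-3863992."""
        dest = posixpath.join(instance_uuid, f"{instance_uuid}.vmdk")
        ds.copy_virtual_disk(cached_vmdk, dest)
        return dest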
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.425875] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.425875] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.563340] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 641.833470] env[69982]: DEBUG nova.scheduler.client.report [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.857750] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863992, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.928344] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Starting instance... 
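The scheduler report client keeps logging "Inventory has not changed for provider ..." because the locally built inventory (VCPU, MEMORY_MB and DISK_GB, with their allocation ratios) matches what is already cached for provider 206a5498-2e79-46c1-a636-9488a05fb67d, so nothing needs to be sent to Placement. A minimal sketch of that compare-before-update check, with a hypothetical push callable standing in for the Placement API call:

    def set_inventory_for_provider(cache, provider_uuid, new_inventory, push):
        """Send inventory to Placement only when it differs from the cached copy."""
        if cache.get(provider_uuid) == new_inventory:
            print(f"Inventory has not changed for provider {provider_uuid}")
            return False
        push(provider_uuid, new_inventory)   # assumed PUT to the Placement API
        cache[provider_uuid] = new_inventory
        return True

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }
    cache = {"206a5498-2e79-46c1-a636-9488a05fb67d": inventory}
    set_inventory_for_provider(cache, "206a5498-2e79-46c1-a636-9488a05fb67d",
                               dict(inventory), push=lambda *args: None)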
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.995375] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Successfully created port: dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.102396] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 642.141184] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 642.141184] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 642.141328] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 642.142008] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 642.142008] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 642.142008] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 642.143155] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f 
tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 642.143155] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 642.143155] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 642.143323] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 642.143816] env[69982]: DEBUG nova.virt.hardware [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 642.144546] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace94d23-b77a-4bae-8082-a0ce84967e8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.154914] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3152c3-df5a-41fa-aad8-bc2be688d4f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.252705] env[69982]: DEBUG nova.network.neutron [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Updating instance_info_cache with network_info: [{"id": "b089582f-8b97-4630-8a47-45009013aceb", "address": "fa:16:3e:a8:be:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb089582f-8b", "ovs_interfaceid": "b089582f-8b97-4630-8a47-45009013aceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.329856] env[69982]: DEBUG nova.compute.manager [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Received event network-vif-plugged-b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 642.332443] env[69982]: DEBUG oslo_concurrency.lockutils [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] Acquiring lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.332443] env[69982]: DEBUG oslo_concurrency.lockutils [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] Lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.332443] env[69982]: DEBUG oslo_concurrency.lockutils [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] Lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.332443] env[69982]: DEBUG nova.compute.manager [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] No waiting events found dispatching network-vif-plugged-b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 642.332443] env[69982]: WARNING nova.compute.manager [req-543be66d-8bb4-4eb0-99c5-a7586d46c72c req-6e2767f8-2a88-4e14-be79-7d853bbfa93b service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Received unexpected event network-vif-plugged-b089582f-8b97-4630-8a47-45009013aceb for instance with vm_state building and task_state spawning. [ 642.346567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.347222] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 642.357209] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.173s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.369570] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863992, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72102} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.370443] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2/3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.370443] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.370443] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2923bcf-b70b-4e53-880e-4227d9fdd38f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.381911] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 642.381911] env[69982]: value = "task-3863993" [ 642.381911] env[69982]: _type = "Task" [ 642.381911] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.398326] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863993, 'name': ExtendVirtualDisk_Task} progress is 0%. 
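Instance claims, the compute-node cache cleanup, and the periodic resource audit in this section all serialize on one "compute_resources" lock, and oslo.concurrency reports how long each caller waited for it and how long it was held (e.g. "waited 2.173s", "held 2.273s"). The following stdlib-only context manager reproduces that instrumentation pattern as a sketch; it stands in for lockutils rather than reproducing it.

    import contextlib
    import threading
    import time

    _locks = {}
    _registry_lock = threading.Lock()

    @contextlib.contextmanager
    def synchronized(name, caller):
        """Acquire a named lock and log wait/hold times, in the style of the log above."""
        with _registry_lock:
            lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: '
                  f'held {time.monotonic() - acquired:.3f}s')

    with synchronized("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)   # claim bookkeeping would happen here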
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.461323] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.490129] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "743a4a52-ce35-4ec1-9286-e0c470e87186" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.490129] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.757991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.757991] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Instance network_info: |[{"id": "b089582f-8b97-4630-8a47-45009013aceb", "address": "fa:16:3e:a8:be:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb089582f-8b", "ovs_interfaceid": "b089582f-8b97-4630-8a47-45009013aceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 642.758186] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 
tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:be:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b089582f-8b97-4630-8a47-45009013aceb', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 642.778808] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Creating folder: Project (f77a1e221ed742f9b4ffae2507db7d83). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.779295] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Successfully updated port: 5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 642.782906] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cc69c90-aa08-473c-a5e5-982ba7df91cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.794740] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Created folder: Project (f77a1e221ed742f9b4ffae2507db7d83) in parent group-v767796. [ 642.795260] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Creating folder: Instances. Parent ref: group-v767800. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 642.795617] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c479cc8d-23d2-420a-8017-a272119447bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.809581] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Created folder: Instances in parent group-v767800. [ 642.809685] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 642.809880] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 642.810099] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac70c2bb-d392-49f5-997c-a1497c39ea58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.839092] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 642.839092] env[69982]: value = "task-3863996" [ 642.839092] env[69982]: _type = "Task" [ 642.839092] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.849283] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3863996, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.862204] env[69982]: DEBUG nova.compute.utils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 642.867216] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 642.867388] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 642.898308] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105066} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.898308] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 642.898810] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c8d4260-4d04-4a88-b10d-30f3194aefd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.925697] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2/3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 642.928419] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deb62a42-52f7-473e-86be-abc8f0c29fe6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.952717] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 642.952717] env[69982]: value = "task-3863997" [ 642.952717] env[69982]: _type = "Task" [ 642.952717] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.962186] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863997, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.991462] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.047291] env[69982]: DEBUG nova.policy [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9d1d57af043c4a0ebf8c9614d5ab19d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'be8434cce0e44f47bd7d88a2f1be7b6e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 643.284230] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.284620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.284620] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 643.357057] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3863996, 'name': CreateVM_Task, 'duration_secs': 0.397018} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.357240] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 643.370190] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 643.376442] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.376925] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.377987] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 643.377987] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b895d533-4c03-49a3-8efe-db563ce0aff9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.387188] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 643.387188] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523cf0ea-32d8-29fe-1766-43c88973d13d" [ 643.387188] env[69982]: _type = "Task" [ 643.387188] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.400215] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523cf0ea-32d8-29fe-1766-43c88973d13d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.410617] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.410769] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d0114728-9d44-4700-86a9-175e5f840b1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.410978] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.411058] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.411776] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance e82ae1bd-c31b-44ca-9608-9348b8eac8dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 643.468572] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863997, 'name': ReconfigVM_Task, 'duration_secs': 0.386092} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.471133] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2/3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.472528] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1067fea7-9aee-433e-933b-7c0d2072cbf5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.481697] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 643.481697] env[69982]: value = "task-3863998" [ 643.481697] env[69982]: _type = "Task" [ 643.481697] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.504423] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863998, 'name': Rename_Task} progress is 6%. 
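The resource-tracker audit above checks each Placement allocation against the instances this host knows about: actively managed instances keep their allocations, while an instance that has been scheduled here but has not started yet (5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8, a few entries below) is skipped rather than healed. The sketch below illustrates that decision only; the state names and helper are assumptions, not the resource tracker's code.

    def audit_allocations(tracker_instances, placement_allocations):
        """Classify Placement allocations the way the audit messages above do.

        tracker_instances maps uuid -> state as this host sees it;
        placement_allocations maps uuid -> resources dict reported by Placement.
        """
        for uuid, resources in placement_allocations.items():
            state = tracker_instances.get(uuid)
            if state is None:
                print(f"Instance {uuid} is not managed here; allocation may be stale")
            elif state == "scheduled":
                print(f"Instance {uuid} has been scheduled to this host but has yet "
                      f"to start. Skipping heal of allocation: {resources}")
            else:
                print(f"Instance {uuid} actively managed on this compute host and "
                      f"has allocations in placement: {resources}")

    audit_allocations(
        {"d0114728-9d44-4700-86a9-175e5f840b1d": "building",
         "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8": "scheduled"},
        {"d0114728-9d44-4700-86a9-175e5f840b1d":
             {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}},
         "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8":
             {"resources": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}}},
    )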
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.527527] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.845480] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 643.905098] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523cf0ea-32d8-29fe-1766-43c88973d13d, 'name': SearchDatastore_Task, 'duration_secs': 0.013307} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.905391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.905615] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 643.905848] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.905989] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.906323] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 643.906506] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0b94274f-54b4-45db-9285-4705da97a66a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.914111] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 643.917922] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Successfully created port: 62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 643.920855] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 643.921181] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 643.924630] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f3aeedb-e255-4109-baa0-2a52afcca6d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.928272] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 643.928272] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d3621-1433-89b8-8d10-0382a6e8c0a7" [ 643.928272] env[69982]: _type = "Task" [ 643.928272] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.937388] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d3621-1433-89b8-8d10-0382a6e8c0a7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.999992] env[69982]: DEBUG nova.compute.manager [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Received event network-vif-plugged-5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 643.999992] env[69982]: DEBUG oslo_concurrency.lockutils [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.999992] env[69982]: DEBUG oslo_concurrency.lockutils [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.999992] env[69982]: DEBUG oslo_concurrency.lockutils [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.999992] env[69982]: DEBUG nova.compute.manager [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] No waiting events found dispatching network-vif-plugged-5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 644.000240] env[69982]: WARNING nova.compute.manager [req-82c60008-765b-4190-adb4-f45a5dd7e18b req-d0e73487-67b6-48e7-8a57-35dbe6e01100 service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Received unexpected event network-vif-plugged-5d41e03b-3fd8-4389-a457-2000cf628f86 for instance with vm_state building and task_state spawning. [ 644.009632] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863998, 'name': Rename_Task, 'duration_secs': 0.148639} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.010177] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.010543] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2ef038d-a302-4e2a-ad64-8a24ac69031b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.018789] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 644.018789] env[69982]: value = "task-3863999" [ 644.018789] env[69982]: _type = "Task" [ 644.018789] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.030911] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863999, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.139816] env[69982]: DEBUG nova.network.neutron [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.380651] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 644.418788] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 644.418788] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 644.418788] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 644.419026] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 644.419684] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 644.419913] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 644.420311] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 644.420786] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 644.420786] env[69982]: DEBUG nova.virt.hardware [None 
req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 644.420897] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 644.421038] env[69982]: DEBUG nova.virt.hardware [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 644.421797] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 743a4a52-ce35-4ec1-9286-e0c470e87186 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 644.422528] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 644.423251] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 644.426641] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a991ea20-6850-4405-955e-8a9e328e910f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.444448] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d3621-1433-89b8-8d10-0382a6e8c0a7, 'name': SearchDatastore_Task, 'duration_secs': 0.011862} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.448358] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ef5cc4c-c140-4382-bc87-b6cb450cb2c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.453752] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c0dae4-78d3-4833-9d3e-189a998ca1df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.466571] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 644.466571] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d4f83a-c609-a8e2-6619-eba79838e077" [ 644.466571] env[69982]: _type = "Task" [ 644.466571] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.493863] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d4f83a-c609-a8e2-6619-eba79838e077, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.494168] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.494426] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d0114728-9d44-4700-86a9-175e5f840b1d/d0114728-9d44-4700-86a9-175e5f840b1d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 644.494677] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-530a8b32-50b1-488c-8add-f96834e507c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.502298] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 644.502298] env[69982]: value = "task-3864000" [ 644.502298] env[69982]: _type = "Task" [ 644.502298] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.513077] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864000, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.529200] env[69982]: DEBUG oslo_vmware.api [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3863999, 'name': PowerOnVM_Task, 'duration_secs': 0.457574} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.529873] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 644.530330] env[69982]: INFO nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Took 9.12 seconds to spawn the instance on the hypervisor. [ 644.530593] env[69982]: DEBUG nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.531587] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd7016a-6a1b-4080-a4dd-43e0541d7165 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.603390] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de4b508-277e-4859-b13c-afafb145b6dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.609462] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921c8d51-2f9c-4526-a262-eb2c6f73fb75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.653443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.653650] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Instance network_info: |[{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": 
[{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 644.654931] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:22:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d41e03b-3fd8-4389-a457-2000cf628f86', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 644.668433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Creating folder: Project (60aa47f826ce4ba7b14d6937eef58338). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.669746] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa07c6d9-819b-471e-b522-d44ee1f9f5cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.674552] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f24f0916-46f8-4f9d-82c6-13c8530a19b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.686185] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5769b67e-cfc0-4afd-82bf-7561b330f8c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.696895] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Created folder: Project (60aa47f826ce4ba7b14d6937eef58338) in parent group-v767796. [ 644.697016] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Creating folder: Instances. Parent ref: group-v767803. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 644.697984] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-accfb6eb-3a52-42a5-ab4a-c18c6cfa9c4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.716102] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.728228] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Created folder: Instances in parent group-v767803. [ 644.728228] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 644.728228] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 644.728228] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a2a50e0-f34a-4af7-b5fd-07d836b6c498 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.753187] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 644.753187] env[69982]: value = "task-3864003" [ 644.753187] env[69982]: _type = "Task" [ 644.753187] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.763330] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864003, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.998406] env[69982]: DEBUG nova.compute.manager [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Received event network-changed-b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 644.998406] env[69982]: DEBUG nova.compute.manager [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Refreshing instance network info cache due to event network-changed-b089582f-8b97-4630-8a47-45009013aceb. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 644.998794] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] Acquiring lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.998794] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] Acquired lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.999541] env[69982]: DEBUG nova.network.neutron [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Refreshing network info cache for port b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 645.028653] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864000, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504781} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.029407] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d0114728-9d44-4700-86a9-175e5f840b1d/d0114728-9d44-4700-86a9-175e5f840b1d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.029931] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.030857] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56d8fcca-2e3e-45cc-b245-40f0a74da40c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.059073] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 645.059073] env[69982]: value = "task-3864004" [ 645.059073] env[69982]: _type = "Task" [ 645.059073] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.069734] env[69982]: INFO nova.compute.manager [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Took 13.88 seconds to build instance. 
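Reader's aside (not part of the captured log): the two INFO records just above report per-instance timings — "Took 9.12 seconds to spawn the instance on the hypervisor" and "Took 13.88 seconds to build instance". As an illustrative aid for working with a transcript like this, here is a minimal, self-contained Python sketch that extracts those timings from a log file. The file name nova-compute.log and the regular expression are assumptions made for the example only; they are not taken from the run itself.

#!/usr/bin/env python3
"""Sketch: summarise 'Took N seconds to spawn/build' lines from a nova-compute style log."""
import re
import sys
from collections import defaultdict

# Matches e.g. "[instance: 3dcf60cd-...] Took 13.88 seconds to build instance."
PATTERN = re.compile(
    r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"Took (?P<secs>[0-9.]+) seconds to "
    r"(?P<what>spawn the instance on the hypervisor|build instance)"
)

def summarise(path):
    timings = defaultdict(dict)  # uuid -> {'spawn': secs, 'build': secs}
    with open(path) as fh:
        for line in fh:
            m = PATTERN.search(line)
            if m:
                key = "spawn" if m.group("what").startswith("spawn") else "build"
                timings[m.group("uuid")][key] = float(m.group("secs"))
    return timings

if __name__ == "__main__":
    # Path is an assumption; pass the real log file as the first argument.
    path = sys.argv[1] if len(sys.argv) > 1 else "nova-compute.log"
    for uuid, t in summarise(path).items():
        print(f"{uuid}: spawn={t.get('spawn')}s build={t.get('build')}s")

Run against this transcript, it would report spawn=9.12s and build=13.88s for instance 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2.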
[ 645.076372] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864004, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.219951] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 645.238456] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Successfully updated port: dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 645.275805] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864003, 'name': CreateVM_Task, 'duration_secs': 0.510384} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.275805] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 645.276581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.276816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.278283] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 645.279043] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-720a2f2c-d09e-4192-9a30-809f967996c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.287945] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] 
Waiting for the task: (returnval){ [ 645.287945] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d91d6e-010f-bb9b-7176-422bb04bb1c7" [ 645.287945] env[69982]: _type = "Task" [ 645.287945] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.298370] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d91d6e-010f-bb9b-7176-422bb04bb1c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.579132] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4ff37c94-6489-460e-88dd-fd3e948ff76d tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.408s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.579457] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085008} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.579742] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 645.580612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1285cb6b-1e8f-4772-8098-08cd9ad5d18d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.617175] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] d0114728-9d44-4700-86a9-175e5f840b1d/d0114728-9d44-4700-86a9-175e5f840b1d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 645.617175] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ad56307-8c5d-4bb5-8dd5-e376169d45ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.640832] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 645.640832] env[69982]: value = "task-3864005" [ 645.640832] env[69982]: _type = "Task" [ 645.640832] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.649830] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864005, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.731293] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 645.731456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.375s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.731771] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.271s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.733498] env[69982]: INFO nova.compute.claims [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.744687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.744841] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquired lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.745048] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 645.804513] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d91d6e-010f-bb9b-7176-422bb04bb1c7, 'name': SearchDatastore_Task, 'duration_secs': 0.035782} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.805445] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.805692] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.805917] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.806070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.806245] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.806962] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd8ae377-1275-4eae-8b5f-b5cc513534fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.818271] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.818271] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.819174] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea873cd1-5159-409d-8c78-6efd2ecbb20a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.825732] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 645.825732] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52da0e81-9370-0f3d-24ff-9287919b74c3" [ 645.825732] env[69982]: _type = "Task" [ 645.825732] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.836993] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52da0e81-9370-0f3d-24ff-9287919b74c3, 'name': SearchDatastore_Task} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.838563] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-124c296d-218a-48b1-82db-0bbe4e1f3532 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.845337] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 645.845337] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523f9946-a42f-e71a-8f36-10da46665584" [ 645.845337] env[69982]: _type = "Task" [ 645.845337] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.854811] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523f9946-a42f-e71a-8f36-10da46665584, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.153135] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864005, 'name': ReconfigVM_Task, 'duration_secs': 0.307421} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.153482] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Reconfigured VM instance instance-00000002 to attach disk [datastore1] d0114728-9d44-4700-86a9-175e5f840b1d/d0114728-9d44-4700-86a9-175e5f840b1d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 646.154089] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c34cfe7e-9183-4605-b6ed-cf40279e1a39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.162313] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 646.162313] env[69982]: value = "task-3864006" [ 646.162313] env[69982]: _type = "Task" [ 646.162313] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.173591] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864006, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.367491] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523f9946-a42f-e71a-8f36-10da46665584, 'name': SearchDatastore_Task, 'duration_secs': 0.009555} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.367769] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.368026] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.368293] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f097850e-039f-40bc-ac89-f30af2708148 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.381282] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 646.381282] env[69982]: value = "task-3864007" [ 646.381282] env[69982]: _type = "Task" [ 646.381282] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.398808] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864007, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.459378] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 646.563665] env[69982]: DEBUG nova.network.neutron [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Updated VIF entry in instance network info cache for port b089582f-8b97-4630-8a47-45009013aceb. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 646.564070] env[69982]: DEBUG nova.network.neutron [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Updating instance_info_cache with network_info: [{"id": "b089582f-8b97-4630-8a47-45009013aceb", "address": "fa:16:3e:a8:be:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.117", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb089582f-8b", "ovs_interfaceid": "b089582f-8b97-4630-8a47-45009013aceb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.678745] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864006, 'name': Rename_Task, 'duration_secs': 0.188107} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.679125] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.679392] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f26fa5b4-0fdc-46ef-9428-c25807144a8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.691757] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 646.691757] env[69982]: value = "task-3864008" [ 646.691757] env[69982]: _type = "Task" [ 646.691757] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.707649] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864008, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.902533] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510237} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.906025] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.906025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.906025] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e875574-9430-4b08-95b7-5453771fb317 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.912852] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 646.912852] env[69982]: value = "task-3864009" [ 646.912852] env[69982]: _type = "Task" [ 646.912852] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.922213] env[69982]: DEBUG nova.compute.manager [None req-6dcd2844-9234-495c-a9a7-507ce52d4686 tempest-ServerDiagnosticsV248Test-433781694 tempest-ServerDiagnosticsV248Test-433781694-project-admin] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.923196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a460ffbd-42d9-4c1c-ae25-c15a2e2351a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.930042] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864009, 'name': ExtendVirtualDisk_Task} progress is 0%. 
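The recurring "Task: {...} progress is N%" lines are produced by oslo_vmware's task polling (wait_for_task and _poll_task, per the source locations in the trailers). The sketch below shows the general shape of such a polling loop with a stubbed task object; it assumes nothing about the library's internals, and FakeTask with its poll() method is a hypothetical stand-in.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle such as 'task-3864007'."""
        def __init__(self, task_id, steps=3):
            self.task_id = task_id
            self._progress = 0
            self._step = 100 // steps

        def poll(self):
            # Advance a little on each poll and report (state, progress).
            self._progress = min(100, self._progress + self._step)
            state = "success" if self._progress >= 100 else "running"
            return state, self._progress

    def wait_for_task(task, interval=0.5):
        """Poll until the task reaches a terminal state, logging progress."""
        while True:
            state, progress = task.poll()
            print(f"Task: {{'id': '{task.task_id}'}} progress is {progress}%.")
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"{task.task_id} failed")
            time.sleep(interval)

    wait_for_task(FakeTask("task-3864007"))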
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.941960] env[69982]: INFO nova.compute.manager [None req-6dcd2844-9234-495c-a9a7-507ce52d4686 tempest-ServerDiagnosticsV248Test-433781694 tempest-ServerDiagnosticsV248Test-433781694-project-admin] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Retrieving diagnostics [ 646.942936] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1c3608-c28d-477d-96d5-f22cbca621aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.982619] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5556360a-c902-4f94-9d74-75ac7580e5f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.991195] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfdd6460-6f3f-42c7-9383-eb46fd5d629e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.026042] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dc8654-75db-4cbe-8ca2-a31b41f8ee96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.034662] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1e4c238-eb43-4822-bddd-6127833d3bbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.050376] env[69982]: DEBUG nova.compute.provider_tree [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.071478] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae19ed07-fce5-4146-8531-8c4997e292da req-b4c33c0f-adc5-408e-8ad7-161f80456bc5 service nova] Releasing lock "refresh_cache-d0114728-9d44-4700-86a9-175e5f840b1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.202276] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864008, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.278154] env[69982]: DEBUG nova.network.neutron [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updating instance_info_cache with network_info: [{"id": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "address": "fa:16:3e:10:0c:44", "network": {"id": "b8822d4c-5a5e-4410-a83d-8794b8e4f11e", "bridge": "br-int", "label": "tempest-ServersTestJSON-343969811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c90023e59e624e6aa7d3d2363050619f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd67d1de-a9", "ovs_interfaceid": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.370375] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Successfully updated port: 62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 647.427491] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864009, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078328} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.432145] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.432145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c1621ad-22e5-48f7-8644-66ac752fc079 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.460609] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.461487] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f201d0-7f1b-4b98-b828-090b236f2ebb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.491227] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 647.491227] env[69982]: value = "task-3864010" [ 647.491227] env[69982]: _type = "Task" [ 647.491227] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.502634] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864010, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.554213] env[69982]: DEBUG nova.scheduler.client.report [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 647.701463] env[69982]: DEBUG oslo_vmware.api [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864008, 'name': PowerOnVM_Task, 'duration_secs': 0.811994} completed successfully. 
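The "Inventory has not changed for provider ..." entry above logs the full placement inventory for this compute node. As a short worked example of what those numbers imply, the loop below applies the usual placement capacity formula, (total - reserved) * allocation_ratio, to the logged values; the dict is copied from the log and the loop is ours.

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # Placement allocates against (total - reserved) * allocation_ratio.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable capacity = {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400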
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.701750] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.701953] env[69982]: INFO nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Took 10.06 seconds to spawn the instance on the hypervisor. [ 647.702171] env[69982]: DEBUG nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.703091] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb84a3c-ea39-4690-a483-46b519875cb9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.781397] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Releasing lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.781749] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Instance network_info: |[{"id": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "address": "fa:16:3e:10:0c:44", "network": {"id": "b8822d4c-5a5e-4410-a83d-8794b8e4f11e", "bridge": "br-int", "label": "tempest-ServersTestJSON-343969811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c90023e59e624e6aa7d3d2363050619f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd67d1de-a9", "ovs_interfaceid": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 647.782219] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:0c:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d62c1cf-f39a-4626-9552-f1e13c692636', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd67d1de-a902-443e-a6bb-1cc4903f5ccd', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 647.794041] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Creating folder: Project (c90023e59e624e6aa7d3d2363050619f). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.794369] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14265f34-72d3-47b2-aa96-c7d87e6ac67a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.805691] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Created folder: Project (c90023e59e624e6aa7d3d2363050619f) in parent group-v767796. [ 647.805746] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Creating folder: Instances. Parent ref: group-v767806. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 647.805988] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02228911-1c6c-45aa-8987-5de72a8468b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.817703] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Created folder: Instances in parent group-v767806. [ 647.817995] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 647.818218] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 647.818434] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5d97596-47bf-4251-8eee-aef4d53b13f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.845794] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 647.845794] env[69982]: value = "task-3864013" [ 647.845794] env[69982]: _type = "Task" [ 647.845794] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.854503] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864013, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.873657] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.874088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquired lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.874306] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 648.010024] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864010, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.011337] env[69982]: DEBUG nova.compute.manager [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Received event network-changed-5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.011868] env[69982]: DEBUG nova.compute.manager [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Refreshing instance network info cache due to event network-changed-5d41e03b-3fd8-4389-a457-2000cf628f86. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.012247] env[69982]: DEBUG oslo_concurrency.lockutils [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] Acquiring lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.012673] env[69982]: DEBUG oslo_concurrency.lockutils [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] Acquired lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.013036] env[69982]: DEBUG nova.network.neutron [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Refreshing network info cache for port 5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 648.061344] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.329s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.061849] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 648.068991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.538s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.070706] env[69982]: INFO nova.compute.claims [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.235482] env[69982]: INFO nova.compute.manager [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Took 15.29 seconds to build instance. [ 648.359353] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864013, 'name': CreateVM_Task, 'duration_secs': 0.387944} completed successfully. 
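The lockutils lines above report, for each caller, how long it waited to acquire "compute_resources" and how long it then held the lock (2.329s held and 4.538s waited in the entries just above). A stdlib-only sketch of that accounting follows; the real implementation is oslo_concurrency.lockutils, and timed_lock here is only an illustrative stand-in.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name):
        lock = _locks.setdefault(name, threading.Lock())
        t_wait = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired :: waited {time.monotonic() - t_wait:.3f}s')
        t_held = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" released :: held {time.monotonic() - t_held:.3f}s')

    with timed_lock("compute_resources"):
        time.sleep(0.1)  # stand-in for ResourceTracker.instance_claim work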
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.359741] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 648.364396] env[69982]: DEBUG oslo_vmware.service [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c86f9e-bf9f-42a4-8ec6-9384c12fb38f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.370709] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.370965] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.371363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 648.371829] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2614ef09-9e7b-4264-9f8f-ea870d3c01b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.384435] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 648.384435] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523feeff-6160-54fd-9113-cffb7039a34e" [ 648.384435] env[69982]: _type = "Task" [ 648.384435] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.399721] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.400053] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.400325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.400543] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.401237] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.401237] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56341db4-c98b-4dbe-adfe-7c935b59fa39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.411142] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.412038] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 648.413156] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1421256c-116f-4e67-a3fe-bb77bcc33278 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.421013] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eee93534-8a67-4d19-8260-b7fcc95c976c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.427690] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 648.427690] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5267796d-45dd-6e90-796d-a04a4e31c3dc" [ 648.427690] env[69982]: _type = "Task" [ 648.427690] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.436766] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5267796d-45dd-6e90-796d-a04a4e31c3dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.480234] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 648.505396] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864010, 'name': ReconfigVM_Task, 'duration_secs': 0.734872} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.505396] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.505396] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44cd0803-858f-49a6-a4e6-86cb551c081d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.521155] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 648.521155] env[69982]: value = "task-3864014" [ 648.521155] env[69982]: _type = "Task" [ 648.521155] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.536912] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864014, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.577546] env[69982]: DEBUG nova.compute.utils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 648.584031] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 648.584031] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 648.673484] env[69982]: DEBUG nova.policy [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 648.740475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-186c89e7-7340-4380-8c54-171f19e238b5 tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "d0114728-9d44-4700-86a9-175e5f840b1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.796s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.942148] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 648.942444] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Creating directory with path [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.943072] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-ab9bd42a-ca96-4769-94fe-6f9c416c8f6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.971826] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Created directory with path [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.972563] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Fetch image to [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 648.973236] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Downloading image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk on the data store datastore2 {{(pid=69982) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 648.974447] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196a1315-4824-413d-8926-4c43e1a95f28 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.987244] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896df270-1207-43b1-83d1-1b9e0d36cca4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.999960] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f982e8c-8fbe-4ff6-b07d-f5cbb833c1d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.047388] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c645b48-2659-4931-be0a-40b99ebd42b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.056637] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864014, 'name': Rename_Task, 'duration_secs': 0.271824} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.058224] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.058786] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac05d116-9b4b-441b-884c-d0b15a8d777c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.060521] env[69982]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-abefa8cf-b029-4697-85af-fa7c291f8e16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.067613] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 649.067613] env[69982]: value = "task-3864015" [ 649.067613] env[69982]: _type = "Task" [ 649.067613] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.077041] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864015, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.083869] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 649.162758] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Downloading image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to the data store datastore2 {{(pid=69982) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 649.210330] env[69982]: DEBUG nova.network.neutron [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Updating instance_info_cache with network_info: [{"id": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "address": "fa:16:3e:65:86:41", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d379af-7a", "ovs_interfaceid": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.273294] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
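The rw_handles entry above gives the datastore file URL used to stream the image data to the ESX host. The transfer itself is handled by oslo_vmware's read/write handles over HTTPS; the stdlib-only snippet below only decomposes what such a URL encodes and performs no upload.

    from urllib.parse import parse_qs, urlsplit

    url = (
        "https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/"
        "vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/"
        "a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk"
        "?dcPath=ha-datacenter&dsName=datastore2"
    )

    parts = urlsplit(url)
    query = parse_qs(parts.query)
    print("esx host:", parts.hostname)        # host receiving the file
    print("file path:", parts.path)           # /folder/<path inside the datastore>
    print("datacenter:", query["dcPath"][0])  # ha-datacenter
    print("datastore:", query["dsName"][0])   # datastore2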
{{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 649.279464] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc56cc5e-7cb4-4341-88db-a201af64cafe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.356107] env[69982]: DEBUG nova.compute.manager [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Received event network-vif-plugged-dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.356319] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Acquiring lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.356622] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.356932] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.356986] env[69982]: DEBUG nova.compute.manager [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] No waiting events found dispatching network-vif-plugged-dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 649.357116] env[69982]: WARNING nova.compute.manager [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Received unexpected event network-vif-plugged-dd67d1de-a902-443e-a6bb-1cc4903f5ccd for instance with vm_state building and task_state spawning. [ 649.357268] env[69982]: DEBUG nova.compute.manager [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Received event network-changed-dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.357410] env[69982]: DEBUG nova.compute.manager [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Refreshing instance network info cache due to event network-changed-dd67d1de-a902-443e-a6bb-1cc4903f5ccd. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 649.357628] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Acquiring lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.357710] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Acquired lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.357853] env[69982]: DEBUG nova.network.neutron [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Refreshing network info cache for port dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 649.370696] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415e9017-9bd3-4c81-818e-cf35af3fdc77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.420894] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0301535f-5568-4ec3-a281-5e4d6a9b49a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.433559] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfc23f0-64d8-45ba-a659-c0950ab7c76c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.445106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "6de35617-22cf-4a32-8651-28ea67532b8f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.445396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.465332] env[69982]: DEBUG nova.compute.provider_tree [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.528302] env[69982]: DEBUG nova.network.neutron [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updated VIF entry in instance network info cache for port 
5d41e03b-3fd8-4389-a457-2000cf628f86. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 649.528611] env[69982]: DEBUG nova.network.neutron [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.596998] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864015, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.714690] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Releasing lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.715513] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Instance network_info: |[{"id": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "address": "fa:16:3e:65:86:41", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d379af-7a", "ovs_interfaceid": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 649.716353] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:86:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62d379af-7ac4-4a77-a97c-43b6d0fd5a3f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 649.725365] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Creating folder: Project (be8434cce0e44f47bd7d88a2f1be7b6e). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.727207] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cd70aa4-77f1-40b0-b5a6-1ec4fa69f0d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.740953] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Created folder: Project (be8434cce0e44f47bd7d88a2f1be7b6e) in parent group-v767796. 
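The pairs of Folder.CreateFolder calls in this trace follow a fixed two-level layout: a "Project (<project-id>)" folder under the OpenStack root folder, then an "Instances" folder beneath it, immediately before the Folder.CreateVM_Task call. A tiny illustration using the project IDs from this run; the helper and the dict are ours, only the folder names and the root moref come from the log.

    def project_folder_name(project_id):
        # Naming convention visible in the CreateFolder log entries.
        return f"Project ({project_id})"

    # Folder tree as created in this trace, keyed by the root folder moref.
    layout = {
        "group-v767796": {
            project_folder_name("c90023e59e624e6aa7d3d2363050619f"): ["Instances"],
            project_folder_name("be8434cce0e44f47bd7d88a2f1be7b6e"): ["Instances"],
        }
    }
    print(layout)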
[ 649.741216] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Creating folder: Instances. Parent ref: group-v767809. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 649.741645] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bddbcf7-4cbf-4eaa-a889-c0d624a5aa0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.754395] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Created folder: Instances in parent group-v767809. [ 649.754800] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 649.755120] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 649.755522] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fd7cecf-896c-4f6e-8a1b-a49f69e1faa1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.779353] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 649.779353] env[69982]: value = "task-3864018" [ 649.779353] env[69982]: _type = "Task" [ 649.779353] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.792790] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864018, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.948634] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 649.973224] env[69982]: DEBUG nova.scheduler.client.report [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 650.034990] env[69982]: DEBUG oslo_concurrency.lockutils [req-8d09638f-77de-4d1c-bf6f-e264b0483e2a req-56588580-5fac-4c28-956b-bc8612f84d3e service nova] Releasing lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.091065] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864015, 'name': PowerOnVM_Task} progress is 82%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.097490] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 650.133635] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 650.137025] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 650.137025] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 650.137025] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 650.137025] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 650.137025] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 650.137649] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 650.137649] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 650.137649] env[69982]: DEBUG nova.virt.hardware [None 
req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 650.137649] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 650.137649] env[69982]: DEBUG nova.virt.hardware [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 650.137966] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1fef811-5bac-4981-abbe-dc2f58f5c40c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.147981] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961db5ff-315b-40f7-ba09-6310784b6f87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.254619] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Completed reading data from the image iterator. {{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 650.255353] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 650.293271] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864018, 'name': CreateVM_Task, 'duration_secs': 0.398279} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.293582] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.294780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.294891] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.295259] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.295769] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b1e525e-0b8d-4a64-aefb-968e11cf6a82 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.304904] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 650.304904] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52391ecc-acdc-32e3-b7ed-006b91410e6a" [ 650.304904] env[69982]: _type = "Task" [ 650.304904] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.320694] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52391ecc-acdc-32e3-b7ed-006b91410e6a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.321581] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Downloaded image file data a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk on the data store datastore2 {{(pid=69982) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 650.323664] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 650.323978] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Copying Virtual Disk [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk to [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.324327] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31d5ca34-e4b9-448d-a886-c63aab8009ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.332443] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 650.332443] env[69982]: value = "task-3864019" [ 650.332443] env[69982]: _type = "Task" [ 650.332443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.344417] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864019, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.479022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.480105] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.481154] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.483515] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.005s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.485135] env[69982]: INFO nova.compute.claims [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.589234] env[69982]: DEBUG oslo_vmware.api [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864015, 'name': PowerOnVM_Task, 'duration_secs': 1.265686} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.589698] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 650.590027] env[69982]: INFO nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Took 10.72 seconds to spawn the instance on the hypervisor. 
[ 650.590305] env[69982]: DEBUG nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 650.591535] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7b7907-180f-4eb8-a0f5-40595cf88589 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.692223] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Successfully created port: 9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.802660] env[69982]: DEBUG nova.network.neutron [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updated VIF entry in instance network info cache for port dd67d1de-a902-443e-a6bb-1cc4903f5ccd. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 650.803526] env[69982]: DEBUG nova.network.neutron [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updating instance_info_cache with network_info: [{"id": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "address": "fa:16:3e:10:0c:44", "network": {"id": "b8822d4c-5a5e-4410-a83d-8794b8e4f11e", "bridge": "br-int", "label": "tempest-ServersTestJSON-343969811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c90023e59e624e6aa7d3d2363050619f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd67d1de-a9", "ovs_interfaceid": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.829183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.829468] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] 
Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 650.829689] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.852252] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864019, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.968757] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.970499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.989678] env[69982]: DEBUG nova.compute.utils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.996022] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.996022] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 651.125243] env[69982]: INFO nova.compute.manager [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Took 15.96 seconds to build instance. 
[ 651.152256] env[69982]: DEBUG nova.policy [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bbed9f8c0df642159e96d80fb6d73e54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'aeecb3137ebc4b5fa087ef207104755c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 651.308786] env[69982]: DEBUG oslo_concurrency.lockutils [req-f467a997-c317-49a7-a573-f8966282d90a req-c0d62ef7-1ab0-44c8-82ee-af338e691c16 service nova] Releasing lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.347185] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864019, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.473651] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.502289] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.627464] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67dbb00e-d6a3-4be9-93c5-d73666cf1248 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.474s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.707930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892c850f-b2b0-4729-9c2e-fc87643d99bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.722242] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a654bb3f-2dab-43d5-9c44-3f7adf98de26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.763451] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68dcc3c9-add6-4aa5-9995-387c251fc2a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.772878] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44320ae-c0f1-4964-875d-33161464baff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.788148] env[69982]: DEBUG nova.compute.provider_tree [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.845549] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864019, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.11975} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.845871] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Copied Virtual Disk [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk to [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.846068] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleting the datastore file [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/tmp-sparse.vmdk {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.848503] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de205a03-025e-49ce-bb3b-5889658b289d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.854443] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 651.854443] env[69982]: value = "task-3864020" [ 651.854443] env[69982]: _type = "Task" [ 651.854443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.866368] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864020, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.004491] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.292453] env[69982]: DEBUG nova.scheduler.client.report [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.350153] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Successfully created port: bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 652.369093] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864020, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.051666} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.370510] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.371279] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Moving file from [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0/a4e69d6f-1c15-4f57-92a8-5e81c6be8172 to [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172. {{(pid=69982) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 652.371745] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-4cc6fcf4-6876-4720-98ca-c562a076d069 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.381455] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 652.381455] env[69982]: value = "task-3864021" [ 652.381455] env[69982]: _type = "Task" [ 652.381455] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.396104] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864021, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.519274] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.561454] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 652.561566] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 652.561714] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 652.561922] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 652.562102] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 652.562280] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 652.562509] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 652.562688] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 652.563626] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 652.567021] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 652.567021] env[69982]: DEBUG nova.virt.hardware [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 652.567021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d79628b-ffa7-4fe7-a54d-020999c94366 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.575895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e326ff3-79f7-4e47-9731-949f0dd8e06e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.661172] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.661572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.801070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 
tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.801804] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.805021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.801s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.808403] env[69982]: INFO nova.compute.claims [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.893789] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864021, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.036485} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.894883] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] File moved {{(pid=69982) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 652.895722] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Cleaning up location [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 652.895722] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleting the datastore file [datastore2] vmware_temp/4ba25db4-5a2a-4a84-a8f0-82b8336a75b0 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.896666] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19ace730-3e28-4a72-ad95-4476512bee5f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.905445] env[69982]: DEBUG nova.compute.manager [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Received event network-vif-plugged-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.905781] env[69982]: DEBUG 
oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Acquiring lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.906266] env[69982]: DEBUG oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.906377] env[69982]: DEBUG oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.906528] env[69982]: DEBUG nova.compute.manager [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] No waiting events found dispatching network-vif-plugged-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.906761] env[69982]: WARNING nova.compute.manager [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Received unexpected event network-vif-plugged-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f for instance with vm_state building and task_state spawning. [ 652.906867] env[69982]: DEBUG nova.compute.manager [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Received event network-changed-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.907117] env[69982]: DEBUG nova.compute.manager [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Refreshing instance network info cache due to event network-changed-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 652.907220] env[69982]: DEBUG oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Acquiring lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.907493] env[69982]: DEBUG oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Acquired lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.907493] env[69982]: DEBUG nova.network.neutron [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Refreshing network info cache for port 62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 652.911764] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 652.911764] env[69982]: value = "task-3864022" [ 652.911764] env[69982]: _type = "Task" [ 652.911764] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.927218] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.948781] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "bba73604-c54f-4643-9e4c-326b643b3d51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.949114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.164176] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.318617] env[69982]: DEBUG nova.compute.utils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.325666] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.325875] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.429356] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037625} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.429356] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 653.429356] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48541ffc-3c02-4ab3-9507-e82f1f9a2416 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.437163] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 653.437163] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cd226-aa60-09da-8798-5b1a71bc9e74" [ 653.437163] env[69982]: _type = "Task" [ 653.437163] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.446333] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cd226-aa60-09da-8798-5b1a71bc9e74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.454335] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.512085] env[69982]: DEBUG nova.policy [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11ce0ddb8cf445f2ab641704393ebe36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e788d70221a4c9c834bd785b689b7f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.611864] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Acquiring lock "d0114728-9d44-4700-86a9-175e5f840b1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.612143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "d0114728-9d44-4700-86a9-175e5f840b1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.612346] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Acquiring lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.612520] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.612695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "d0114728-9d44-4700-86a9-175e5f840b1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.620392] env[69982]: INFO nova.compute.manager [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Terminating instance [ 653.691797] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 
tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.745865] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Successfully updated port: 9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.827161] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.952692] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cd226-aa60-09da-8798-5b1a71bc9e74, 'name': SearchDatastore_Task, 'duration_secs': 0.01686} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.955901] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.958014] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 1bdb1577-cc35-4839-8992-ae3b4ab87eb2/1bdb1577-cc35-4839-8992-ae3b4ab87eb2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 653.958014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.958014] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.958014] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-739da585-e9b8-486b-b12a-d672cd8725ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.959416] 
env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bd1c4da-7a04-4f64-81f1-2b45952876db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.981025] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 653.981025] env[69982]: value = "task-3864023" [ 653.981025] env[69982]: _type = "Task" [ 653.981025] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.992245] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.992483] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.998445] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fee9cd84-69d3-41a1-9f55-57413d6e2a06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.004168] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864023, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.005356] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.015322] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 654.015322] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e728fb-b704-b4a5-16f6-8ce5f2ec7c4c" [ 654.015322] env[69982]: _type = "Task" [ 654.015322] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.029550] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e728fb-b704-b4a5-16f6-8ce5f2ec7c4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.126845] env[69982]: DEBUG nova.compute.manager [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 654.129060] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 654.129534] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ab2627-84e8-45a4-a038-92f7cabf1d12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.141189] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 654.141485] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1570b210-080d-42b9-b5b5-46218086dcbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.155474] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Waiting for the task: (returnval){ [ 654.155474] env[69982]: value = "task-3864024" [ 654.155474] env[69982]: _type = "Task" [ 654.155474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.178085] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864024, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.217940] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f242b16-0336-4b11-82ad-91aa63d17cee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.228934] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f4baf4-c3d8-4a76-90dc-bf7e81deba4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.266013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.266013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.266013] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 654.268607] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372c5a71-0822-49c3-99f7-499c183032a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.281742] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738839dc-730c-4c67-af11-540f45da4b0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.297556] env[69982]: DEBUG nova.compute.provider_tree [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.374952] env[69982]: DEBUG nova.network.neutron [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Updated VIF entry in instance network info cache for port 62d379af-7ac4-4a77-a97c-43b6d0fd5a3f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 654.375409] env[69982]: DEBUG nova.network.neutron [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Updating instance_info_cache with network_info: [{"id": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "address": "fa:16:3e:65:86:41", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.182", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62d379af-7a", "ovs_interfaceid": "62d379af-7ac4-4a77-a97c-43b6d0fd5a3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.498860] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864023, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.541303] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e728fb-b704-b4a5-16f6-8ce5f2ec7c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.017006} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.542727] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-230b6d79-e04a-423e-a6e9-3ab19acec5ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.554500] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 654.554500] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c31a-a24a-f59e-894f-1adf88d2f7ad" [ 654.554500] env[69982]: _type = "Task" [ 654.554500] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.569999] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c31a-a24a-f59e-894f-1adf88d2f7ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.674260] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864024, 'name': PowerOffVM_Task, 'duration_secs': 0.206344} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.674680] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 654.674895] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 654.675219] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14fbc4da-e659-4450-921a-a8bc3fbb2846 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.767631] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 654.767631] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 654.767631] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Deleting the datastore file [datastore1] d0114728-9d44-4700-86a9-175e5f840b1d {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.768048] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-599e3960-3030-4468-94d8-461e5026d27a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.779054] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Waiting for the task: (returnval){ [ 654.779054] env[69982]: value = "task-3864026" [ 654.779054] env[69982]: _type = "Task" [ 654.779054] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.792706] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.802290] env[69982]: DEBUG nova.scheduler.client.report [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.846268] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.887038] env[69982]: DEBUG oslo_concurrency.lockutils [req-73566166-85b3-42ae-8a6f-ceaf2a43df19 req-4d00f043-71f0-4bb2-8509-ad90875005b3 service nova] Releasing lock "refresh_cache-e82ae1bd-c31b-44ca-9608-9348b8eac8dc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.890409] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 654.898198] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 654.898489] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 654.898574] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 654.899092] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 654.899092] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 654.899092] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 654.899371] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 654.905177] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
654.905469] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 654.905696] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 654.906144] env[69982]: DEBUG nova.virt.hardware [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 654.907634] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09fbfaea-0dc7-425e-8d43-47d600b72878 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.921032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af2cbf9-193b-4e40-ac93-d8cd894a21fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.005651] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783648} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.005965] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 1bdb1577-cc35-4839-8992-ae3b4ab87eb2/1bdb1577-cc35-4839-8992-ae3b4ab87eb2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.006168] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.006439] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-28eed0d3-5312-4657-aa7c-2381f075d9c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.016683] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 655.016683] env[69982]: value = "task-3864027" [ 655.016683] env[69982]: _type = "Task" [ 655.016683] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.028110] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.068821] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c31a-a24a-f59e-894f-1adf88d2f7ad, 'name': SearchDatastore_Task, 'duration_secs': 0.061575} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.069175] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.069461] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e82ae1bd-c31b-44ca-9608-9348b8eac8dc/e82ae1bd-c31b-44ca-9608-9348b8eac8dc.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.069748] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-680a5097-c10b-4bfd-8202-d1b04efdd143 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.082903] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 655.082903] env[69982]: value = "task-3864028" [ 655.082903] env[69982]: _type = "Task" [ 655.082903] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.093504] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.293175] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864026, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.309563] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.504s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.310214] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 655.313384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.622s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.315043] env[69982]: INFO nova.compute.claims [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 655.530557] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.307211} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.530931] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.531612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2397f88c-7004-499d-90b9-ae89d5133b51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.567718] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 1bdb1577-cc35-4839-8992-ae3b4ab87eb2/1bdb1577-cc35-4839-8992-ae3b4ab87eb2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.571034] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29fb548f-9cc2-4390-90e0-664d590e36dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.585156] env[69982]: DEBUG nova.network.neutron [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Updating instance_info_cache with network_info: [{"id": "9e46d480-3573-461c-a8d3-0a5bc4944687", "address": "fa:16:3e:78:a4:59", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e46d480-35", "ovs_interfaceid": "9e46d480-3573-461c-a8d3-0a5bc4944687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.591278] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Successfully created port: 2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 655.596244] env[69982]: DEBUG oslo_vmware.api [None 
req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 655.596244] env[69982]: value = "task-3864029" [ 655.596244] env[69982]: _type = "Task" [ 655.596244] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.599600] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864028, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.607998] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.608244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.617258] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.799365] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.821346] env[69982]: DEBUG nova.compute.utils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 655.825324] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 655.825862] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 655.947294] env[69982]: DEBUG nova.policy [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cd1d52475c8443d9ba86144187623806', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '43849a9ac5c349d29082fc270a8afd0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 656.095432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.095755] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Instance network_info: |[{"id": "9e46d480-3573-461c-a8d3-0a5bc4944687", "address": "fa:16:3e:78:a4:59", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e46d480-35", "ovs_interfaceid": "9e46d480-3573-461c-a8d3-0a5bc4944687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.097021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:a4:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e46d480-3573-461c-a8d3-0a5bc4944687', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.111470] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating folder: Project (babff47774384a5ca2d938bcc6331aa6). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.111470] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-274257a7-be9e-4c96-ac8e-15b91acdd588 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.125619] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 656.130096] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864028, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.015583} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.131297] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e82ae1bd-c31b-44ca-9608-9348b8eac8dc/e82ae1bd-c31b-44ca-9608-9348b8eac8dc.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.131542] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.133915] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22ed98d3-c788-40e1-9162-a83b4204e572 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.144781] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.148577] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created folder: Project (babff47774384a5ca2d938bcc6331aa6) in parent group-v767796. 
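The entries around this point all follow the same recurring pattern: a vSphere task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, Folder.CreateFolder, DeleteDatastoreFile_Task, and so on) is submitted, the service logs "Waiting for the task ... to complete", polls it while reporting "progress is N%", and finally records the duration_secs when it completes successfully. The snippet below is a minimal, illustrative sketch of such a polling loop written to mirror that log pattern; the poll interval, the get_task_info callable, and the TaskFailed exception are assumptions made for the sketch and are not the oslo.vmware implementation itself.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; illustrative value, not taken from the log


    class TaskFailed(Exception):
        """Raised when the vSphere task ends in an error state (illustrative)."""


    def wait_for_task(session, task_ref, get_task_info):
        """Poll a vSphere task until it completes, mirroring the log pattern above.

        `get_task_info` is a hypothetical callable that fetches the task's
        TaskInfo (state, progress, error) through the session, e.g. via a
        property-collector call like the RetrievePropertiesEx invocations
        seen throughout this log.
        """
        start = time.monotonic()
        while True:
            info = get_task_info(session, task_ref)
            if info.state == 'success':
                # Corresponds to: "Task: {'id': ..., 'duration_secs': ...} completed successfully."
                return {'id': task_ref, 'name': info.name,
                        'duration_secs': round(time.monotonic() - start, 6)}
            if info.state == 'error':
                raise TaskFailed(f"{info.name} failed: {info.error}")
            # Corresponds to: "Task: {'id': ..., 'name': ...} progress is N%."
            print(f"Task {task_ref} ({info.name}) progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)

In this log the same wait-and-poll wrapper surrounds every long-running vCenter operation: image-cache disk copies, root-disk extension, VM reconfiguration, power-off, unregistration, and datastore file deletion.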
[ 656.151049] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating folder: Instances. Parent ref: group-v767812. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 656.151049] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd925bba-82e1-485f-8d78-958c4c49efde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.164927] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 656.164927] env[69982]: value = "task-3864032" [ 656.164927] env[69982]: _type = "Task" [ 656.164927] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.169905] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created folder: Instances in parent group-v767812. [ 656.171219] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.171500] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.173189] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cb4b865-e299-40d8-9666-017c86885581 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.197035] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864032, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.206206] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.206206] env[69982]: value = "task-3864033" [ 656.206206] env[69982]: _type = "Task" [ 656.206206] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.221184] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864033, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.298439] env[69982]: DEBUG oslo_vmware.api [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Task: {'id': task-3864026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.04455} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.298840] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 656.299131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 656.299373] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 656.299578] env[69982]: INFO nova.compute.manager [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Took 2.17 seconds to destroy the instance on the hypervisor. [ 656.299888] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.300124] env[69982]: DEBUG nova.compute.manager [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 656.300224] env[69982]: DEBUG nova.network.neutron [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 656.328884] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.627964] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864029, 'name': ReconfigVM_Task, 'duration_secs': 0.993425} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.628387] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 1bdb1577-cc35-4839-8992-ae3b4ab87eb2/1bdb1577-cc35-4839-8992-ae3b4ab87eb2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.629239] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51c54fca-1405-40ac-8fbd-cf90e83100b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.644399] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 656.644399] env[69982]: value = "task-3864034" [ 656.644399] env[69982]: _type = "Task" [ 656.644399] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.667073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.671682] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864034, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.683660] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864032, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100886} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.685123] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.686047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e6a262-be54-42b3-b953-9960d7fb0313 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.689670] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c178997-706d-47f2-9168-31902ec09abe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.719360] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71dbf49-ced3-408e-ac09-335969426892 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.735513] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] e82ae1bd-c31b-44ca-9608-9348b8eac8dc/e82ae1bd-c31b-44ca-9608-9348b8eac8dc.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.738888] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7050aa2-bdfa-40b9-ad9d-feb54fcb8e48 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.762651] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864033, 'name': CreateVM_Task, 'duration_secs': 0.42645} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.792313] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.793048] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 656.793048] env[69982]: value = "task-3864035" [ 656.793048] env[69982]: _type = "Task" [ 656.793048] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.794230] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.794230] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.794510] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.795987] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5aaeab5-5920-411b-8b4c-211878a7d711 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.801658] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff5fb9eb-1b4a-4571-ab23-9f3fb14a3678 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.810280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.810631] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.825231] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3eece3-77d0-42ec-81b4-d7173c2da77e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.828741] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864035, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.829146] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 656.829146] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527dd5bb-0982-2cc5-4207-530703a9b85b" [ 656.829146] env[69982]: _type = "Task" [ 656.829146] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.848258] env[69982]: DEBUG nova.compute.provider_tree [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.860939] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527dd5bb-0982-2cc5-4207-530703a9b85b, 'name': SearchDatastore_Task, 'duration_secs': 0.013412} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.862586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.862586] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.862586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.862586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.863218] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.863218] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-291cbd7f-4012-4d75-b83a-50f3d3085eb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.877470] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.877652] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.878443] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcfe0c95-a7e0-4053-8201-e1695f13143b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.885433] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 656.885433] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52741d63-6eac-da3d-a8d1-a8895662dc88" [ 656.885433] env[69982]: _type = "Task" [ 656.885433] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.889910] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Successfully updated port: bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.900688] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52741d63-6eac-da3d-a8d1-a8895662dc88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.000023] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Successfully created port: 79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.160923] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864034, 'name': Rename_Task, 'duration_secs': 0.233939} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.161233] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.161481] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8320c662-190f-42b1-a44c-047b58d012f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.172811] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 657.172811] env[69982]: value = "task-3864036" [ 657.172811] env[69982]: _type = "Task" [ 657.172811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.184597] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.245894] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.246380] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.297038] env[69982]: DEBUG nova.compute.manager [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Received event network-vif-plugged-9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 657.297038] env[69982]: DEBUG oslo_concurrency.lockutils [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] Acquiring lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.297038] env[69982]: DEBUG oslo_concurrency.lockutils [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.299562] env[69982]: DEBUG oslo_concurrency.lockutils [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.299562] env[69982]: DEBUG nova.compute.manager [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] No waiting events found dispatching network-vif-plugged-9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 657.299562] env[69982]: WARNING nova.compute.manager [req-36d484c1-7e6f-45a6-a277-d07b57256684 req-9f760624-3480-4f81-87df-b359d2f85ce1 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Received unexpected event network-vif-plugged-9e46d480-3573-461c-a8d3-0a5bc4944687 for instance with vm_state building and task_state spawning. [ 657.312676] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864035, 'name': ReconfigVM_Task, 'duration_secs': 0.356108} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.313060] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Reconfigured VM instance instance-00000005 to attach disk [datastore2] e82ae1bd-c31b-44ca-9608-9348b8eac8dc/e82ae1bd-c31b-44ca-9608-9348b8eac8dc.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.313787] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57615384-c2f4-4856-9520-6125a7bca847 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.321772] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 657.325815] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 657.325815] env[69982]: value = "task-3864037" [ 657.325815] env[69982]: _type = "Task" [ 657.325815] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.343814] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864037, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.352263] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 657.355943] env[69982]: DEBUG nova.scheduler.client.report [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.396739] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.396958] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.401558] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.401558] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.401558] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Image pref 0:0:0 {{(pid=69982) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.401558] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.401558] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.402185] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.402185] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.402185] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.402185] env[69982]: DEBUG nova.virt.hardware [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.402185] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.402824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.402824] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 657.402824] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d60e6b40-d8af-4434-a010-89a7888bc240 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.411686] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52741d63-6eac-da3d-a8d1-a8895662dc88, 'name': SearchDatastore_Task, 'duration_secs': 0.013652} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.412928] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c61b3330-b6ec-4702-87fc-9d4bef3bbf94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.421479] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d91608d-1c45-434b-be96-d831601a2449 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.428406] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 657.428406] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525c66b8-ecb9-8169-e441-d84ad93d0bc2" [ 657.428406] env[69982]: _type = "Task" [ 657.428406] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.454725] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525c66b8-ecb9-8169-e441-d84ad93d0bc2, 'name': SearchDatastore_Task, 'duration_secs': 0.015716} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.454806] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.455094] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8/5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.455986] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aad03e11-0dd6-4226-9d0f-50fd0e033480 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.468960] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 657.468960] env[69982]: value = "task-3864038" [ 657.468960] env[69982]: _type = "Task" [ 657.468960] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.485299] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864038, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.642412] env[69982]: DEBUG nova.network.neutron [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.690521] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864036, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.848111] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864037, 'name': Rename_Task, 'duration_secs': 0.163704} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.848452] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.848736] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c484949b-c7cf-4b20-8c94-24729e081f23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.855813] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.860358] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 657.860358] env[69982]: value = "task-3864039" [ 657.860358] env[69982]: _type = "Task" [ 657.860358] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.864425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.864955] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.872023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.864s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.872023] env[69982]: INFO nova.compute.claims [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.883757] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864039, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.982472] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864038, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.097866] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 658.149379] env[69982]: INFO nova.compute.manager [-] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Took 1.85 seconds to deallocate network for instance. [ 658.186912] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864036, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.382805] env[69982]: DEBUG nova.compute.utils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.386697] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864039, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.390824] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.391357] env[69982]: DEBUG nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.490630] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864038, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.534141] env[69982]: DEBUG nova.policy [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '238f1dee39314953b8e7d920aa8e3d6f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbd8eef253884e57bb88beed7b50557a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.662490] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.692872] env[69982]: DEBUG oslo_vmware.api [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864036, 'name': PowerOnVM_Task, 'duration_secs': 1.256654} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.693718] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.694109] env[69982]: INFO nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Took 16.59 seconds to spawn the instance on the hypervisor. 
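The run of entries above shows the usual shape of a vCenter operation in this driver: an asynchronous task is started ("Invoking VirtualMachine.PowerOnVM_Task"), then polled until completion (the wait_for_task / _poll_task records, the "progress is N%" lines, and finally a "completed successfully" record carrying duration_secs). The sketch below only illustrates that general oslo.vmware pattern, not the driver's actual code; `session` (an existing oslo_vmware.api.VMwareAPISession) and `vm_ref` (a managed-object reference to the VM) are assumed inputs.

    # Illustrative sketch of the start-task / poll-task pattern visible above.
    # Assumes `session` is an existing oslo_vmware.api.VMwareAPISession and
    # `vm_ref` is a managed-object reference to the VM; not Nova's actual code.
    def power_on_vm(session, vm_ref):
        # Kick off the asynchronous vCenter task; this corresponds to the
        # "Invoking VirtualMachine.PowerOnVM_Task" entries in the log.
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # Block until the task finishes. oslo.vmware polls the task state,
        # which is what produces the "progress is N%" lines and the final
        # "completed successfully" record with its duration_secs value.
        return session.wait_for_task(task)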
[ 658.694526] env[69982]: DEBUG nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.695542] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c719d583-0cd5-46ab-be79-0d949bc891b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.831839] env[69982]: DEBUG nova.network.neutron [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.876144] env[69982]: DEBUG oslo_vmware.api [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864039, 'name': PowerOnVM_Task, 'duration_secs': 0.697408} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.876592] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 658.876946] env[69982]: INFO nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Took 14.50 seconds to spawn the instance on the hypervisor. 
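The instance_info_cache update above embeds the instance's network_info as a list of VIF entries (Neutron port ID, MAC, subnets, fixed IPs, OVS binding details). As a small, self-contained illustration of that shape, the snippet below walks a plain-dict structure with values copied from that record and pulls out the port ID, MAC, and fixed IPs; it uses ordinary dicts rather than Nova's NetworkInfo/VIF model objects.

    # Plain-dict illustration of the network_info shape logged above (values
    # taken from the cache-update record); Nova wraps this in model objects.
    network_info = [{
        "id": "bbcc00a3-079c-427d-9966-920e8614cd44",   # Neutron port ID
        "address": "fa:16:3e:75:34:19",                  # MAC address
        "network": {
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.8", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]
                     if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], fixed_ips)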
[ 658.877317] env[69982]: DEBUG nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.878230] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2f2a15-ebd1-47ad-a242-71d20e6cb5da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.897290] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.986492] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864038, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.205186] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d11e867-0405-44c1-a866-8fdb56c950f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.225920] env[69982]: INFO nova.compute.manager [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Took 23.39 seconds to build instance. 
[ 659.229695] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286c0948-ea3f-49bf-a1cc-e05bf8fbcd20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.280436] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8a79499-b388-4ce7-b7b6-d73c27981fd1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.294123] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a6e382-0ac1-4640-bd96-24c010538ae2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.319986] env[69982]: DEBUG nova.compute.provider_tree [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.335090] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.335416] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Instance network_info: |[{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 659.335851] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:34:19', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bbcc00a3-079c-427d-9966-920e8614cd44', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 659.349764] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Creating folder: Project (aeecb3137ebc4b5fa087ef207104755c). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.350402] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b2b411c-e491-45b3-a16d-e1bb97ec6443 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.362230] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Created folder: Project (aeecb3137ebc4b5fa087ef207104755c) in parent group-v767796. [ 659.362284] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Creating folder: Instances. Parent ref: group-v767815. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.362539] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab9edbd1-1cd9-4b58-b8d0-b9a1dbdc4738 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.377409] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Created folder: Instances in parent group-v767815. [ 659.377810] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.378026] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.378242] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bfc6cbe-7de3-4ba3-8332-88ba09871d1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.420682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.421222] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.421720] env[69982]: INFO nova.compute.manager [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Took 20.04 seconds to build instance. [ 659.433731] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.433731] env[69982]: value = "task-3864042" [ 659.433731] env[69982]: _type = "Task" [ 659.433731] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.447732] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864042, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.489923] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864038, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.558191} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.490233] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8/5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.490489] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.490792] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86c86c9c-643a-41f5-86ff-259ceecdc58e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.501451] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 659.501451] env[69982]: value = "task-3864043" [ 659.501451] env[69982]: _type = "Task" [ 659.501451] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.518841] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864043, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.562665] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Successfully updated port: 2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 659.572089] env[69982]: DEBUG nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Successfully created port: 584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 659.589780] env[69982]: DEBUG nova.compute.manager [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-vif-plugged-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.589905] env[69982]: DEBUG oslo_concurrency.lockutils [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] Acquiring lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.591368] env[69982]: DEBUG oslo_concurrency.lockutils [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.591582] env[69982]: DEBUG oslo_concurrency.lockutils [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.591800] env[69982]: DEBUG nova.compute.manager [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] No waiting events found dispatching network-vif-plugged-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 659.591981] env[69982]: WARNING nova.compute.manager [req-67b47d61-f7c2-4041-b0f3-fe622a597925 req-830b5473-99d3-4dc2-a5db-4d8c6c64e95d service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received unexpected event network-vif-plugged-bbcc00a3-079c-427d-9966-920e8614cd44 for instance with vm_state building and task_state spawning. 
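The Acquiring/acquired/released triples that recur through these entries (the per-instance "-events" lock just above, the devstack-image-cache_base datastore lock, "compute_resources", and the per-instance build locks) are emitted by oslo.concurrency's lockutils. The sketch below shows the two usage forms that appear to produce the two message styles seen here; the lock names and function bodies are illustrative only, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Hedged sketch of the two lockutils patterns whose DEBUG output appears
    # in this log; lock names and bodies are illustrative, not Nova's code.

    # Decorator form: emits the 'Lock "..." acquired by "..." :: waited Ns'
    # and '"released" by "..." :: held Ns' lines while the call runs.
    @lockutils.synchronized("instance-uuid-events")
    def pop_event(events, name):
        return events.pop(name, None)

    # Context-manager form: emits the plainer 'Acquiring lock' / 'Acquired
    # lock' / 'Releasing lock' lines (as seen around the image-cache path).
    def read_image_cache(fn):
        with lockutils.lock("[datastore2] devstack-image-cache_base"):
            return fn()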
[ 659.738066] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f42a2fb1-8565-4635-8a45-4c76256e139f tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.913s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.826084] env[69982]: DEBUG nova.scheduler.client.report [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.924805] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.934441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20f6595e-8f9c-4fbc-b54e-818a847605d6 tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.563s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.946572] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864042, 'name': CreateVM_Task, 'duration_secs': 0.473149} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.947060] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.948107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.948325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.948671] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 659.952053] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-067bb7f0-8612-4bed-aeff-d52c622ea421 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.958926] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 659.958926] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520750b1-f12c-ef84-2111-b0b01fddfde4" [ 659.958926] env[69982]: _type = "Task" [ 659.958926] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.965160] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 659.965472] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 659.965664] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 659.965882] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 659.966107] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 659.970217] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 659.970217] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 659.970217] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 659.970217] 
env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 659.970217] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 659.970778] env[69982]: DEBUG nova.virt.hardware [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 659.970778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b6c983-d4f7-4fa9-9241-b3f75fe0c1ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.981518] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520750b1-f12c-ef84-2111-b0b01fddfde4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.983054] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1624d3e3-4896-48e4-a8ff-6dca984146c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.014840] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076116} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.015134] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 660.015963] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4316e1dc-a089-40cd-94cd-b1cf0709083d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.041942] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8/5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 660.042607] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07b77b5d-1dee-4eca-8171-e1b506fe927d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.064592] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 660.064592] env[69982]: value = "task-3864044" [ 660.064592] env[69982]: _type = "Task" [ 660.064592] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.065094] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.065221] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.065363] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 660.078416] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864044, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.241385] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.281315] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Successfully updated port: 79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 660.311835] env[69982]: DEBUG nova.compute.manager [None req-e1dfc644-1fec-4c38-a77c-08e26de8c5ca tempest-ServerDiagnosticsV248Test-433781694 tempest-ServerDiagnosticsV248Test-433781694-project-admin] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 660.313048] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eefebf3-1227-41c3-95e2-c5080da77f4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.322708] env[69982]: INFO nova.compute.manager [None req-e1dfc644-1fec-4c38-a77c-08e26de8c5ca tempest-ServerDiagnosticsV248Test-433781694 tempest-ServerDiagnosticsV248Test-433781694-project-admin] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Retrieving diagnostics [ 660.325869] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16830f5-6363-4cd5-8881-996fca5da50c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.331388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.331996] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.370199] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.702s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.371373] env[69982]: INFO nova.compute.claims [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.442061] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 660.472903] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520750b1-f12c-ef84-2111-b0b01fddfde4, 'name': SearchDatastore_Task, 'duration_secs': 0.021068} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.473239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.473471] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.473709] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.473849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.474033] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.474298] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5cd1950d-5d6a-4054-a002-9d709e595a80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.485415] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.485632] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.488049] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb8bfee-8d0c-4985-b8ac-c18e6a14ec8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.506472] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 660.506472] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d9c79d-2964-3a02-033d-11d43cbb985e" [ 660.506472] env[69982]: _type = "Task" [ 660.506472] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.524240] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d9c79d-2964-3a02-033d-11d43cbb985e, 'name': SearchDatastore_Task, 'duration_secs': 0.010495} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.525261] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98f3e16-3971-4269-9e26-3973a9aa7855 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.533465] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 660.533465] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ccf90-e9ee-1657-501d-f5ac79f264b1" [ 660.533465] env[69982]: _type = "Task" [ 660.533465] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.544502] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ccf90-e9ee-1657-501d-f5ac79f264b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.587743] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.673838] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 660.775954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.784790] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.784940] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquired lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.785107] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 660.837950] env[69982]: DEBUG nova.compute.utils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.839693] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 660.841605] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 660.927024] env[69982]: DEBUG nova.policy [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11ce0ddb8cf445f2ab641704393ebe36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e788d70221a4c9c834bd785b689b7f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 660.975630] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.027747] env[69982]: DEBUG nova.compute.manager [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 661.048796] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ccf90-e9ee-1657-501d-f5ac79f264b1, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.049127] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.049391] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 743a4a52-ce35-4ec1-9286-e0c470e87186/743a4a52-ce35-4ec1-9286-e0c470e87186.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.049654] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47d7f544-6e25-40d1-99ee-c6a994f6145b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.060118] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 661.060118] env[69982]: value = "task-3864045" [ 661.060118] env[69982]: _type = "Task" [ 661.060118] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.074373] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864045, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.091024] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864044, 'name': ReconfigVM_Task, 'duration_secs': 0.60381} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.091024] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8/5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 661.091024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-105a35ab-9d77-4816-9043-64c76f5e3bdf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.096998] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 661.096998] env[69982]: value = "task-3864046" [ 661.096998] env[69982]: _type = "Task" [ 661.096998] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.110696] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864046, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.162863] env[69982]: DEBUG nova.network.neutron [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updating instance_info_cache with network_info: [{"id": "2819d991-2637-403b-b564-70e27a846f61", "address": "fa:16:3e:dd:be:99", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2819d991-26", "ovs_interfaceid": "2819d991-2637-403b-b564-70e27a846f61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.348919] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Start building block device mappings for 
instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.404071] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 661.552894] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.577356] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864045, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.608635] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864046, 'name': Rename_Task, 'duration_secs': 0.173934} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.609184] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.609540] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52c52534-da92-4635-81f8-cf6e1fcc0861 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.622094] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 661.622094] env[69982]: value = "task-3864047" [ 661.622094] env[69982]: _type = "Task" [ 661.622094] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.636463] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864047, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.637717] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Successfully created port: f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 661.669889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.670598] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Instance network_info: |[{"id": "2819d991-2637-403b-b564-70e27a846f61", "address": "fa:16:3e:dd:be:99", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2819d991-26", "ovs_interfaceid": "2819d991-2637-403b-b564-70e27a846f61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 661.672871] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:be:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2819d991-2637-403b-b564-70e27a846f61', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 661.685428] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Creating folder: Project (7e788d70221a4c9c834bd785b689b7f6). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.690436] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a6344e3-c47d-45ac-acfd-3161ede4824c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.703736] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Created folder: Project (7e788d70221a4c9c834bd785b689b7f6) in parent group-v767796. [ 661.704169] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Creating folder: Instances. Parent ref: group-v767818. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 661.704393] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6afc6b0-30a1-4e24-8d77-9de293866d8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.721682] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Created folder: Instances in parent group-v767818. [ 661.721925] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 661.722150] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 661.722373] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fcaf62a5-52fb-4a9b-bba3-d99e927fa40c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.747174] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 661.747174] env[69982]: value = "task-3864050" [ 661.747174] env[69982]: _type = "Task" [ 661.747174] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.756482] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864050, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.761147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d94450-5d0f-4d72-8856-c3fb9f6b9e87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.769420] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf18f24-69a5-485e-a414-158ab1134026 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.809425] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cf9fc8-8179-4cbe-9bfb-bc0c8ee987c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.820534] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2221f1aa-0f8d-4c1b-9e27-784179a4d43f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.841812] env[69982]: DEBUG nova.compute.provider_tree [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.953488] env[69982]: DEBUG nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Successfully updated port: 584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 662.072256] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864045, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.569369} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.072565] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 743a4a52-ce35-4ec1-9286-e0c470e87186/743a4a52-ce35-4ec1-9286-e0c470e87186.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.072799] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.073295] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ca70be9-924a-488e-81d8-2a1629eeb48e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.083184] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 662.083184] env[69982]: value = "task-3864051" [ 662.083184] env[69982]: _type = "Task" [ 662.083184] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.092494] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.135251] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864047, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.210329] env[69982]: DEBUG nova.network.neutron [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating instance_info_cache with network_info: [{"id": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "address": "fa:16:3e:b7:54:77", "network": {"id": "cc0fd9bc-db9f-4bd5-a4cf-197e8f8c0253", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1842391413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43849a9ac5c349d29082fc270a8afd0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79537ce6-7e", "ovs_interfaceid": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.261846] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864050, 'name': CreateVM_Task, 'duration_secs': 0.410212} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.261846] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 662.262905] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.262987] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.263369] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 662.265600] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b2e6f3b-32c5-46bb-8619-ad193fbf1e53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.271559] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 662.271559] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e968d3-4dab-cd0d-4838-083386eef443" [ 662.271559] env[69982]: _type = "Task" [ 662.271559] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.280585] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e968d3-4dab-cd0d-4838-083386eef443, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.348485] env[69982]: DEBUG nova.scheduler.client.report [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.360253] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.403884] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.404768] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.405591] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.405591] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.405591] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 662.405591] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.405834] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.405975] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.406076] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.406222] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.406537] env[69982]: DEBUG nova.virt.hardware [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.407440] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177083fc-2274-449e-81da-ea00fc871e8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.420134] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9eb308-e014-4383-968e-c407893f712a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.457909] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.457909] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquired lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.458088] env[69982]: DEBUG 
nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 662.604718] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.237953} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.604718] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.605102] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c203a2c-c14d-4e8f-98e8-bdf86efab9a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.644226] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 743a4a52-ce35-4ec1-9286-e0c470e87186/743a4a52-ce35-4ec1-9286-e0c470e87186.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.647635] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06cd3059-37c1-428a-b9ac-f7235ba16676 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.674908] env[69982]: DEBUG oslo_vmware.api [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864047, 'name': PowerOnVM_Task, 'duration_secs': 1.019856} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.676312] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.676514] env[69982]: INFO nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Took 12.58 seconds to spawn the instance on the hypervisor. 
[ 662.676698] env[69982]: DEBUG nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.679756] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 662.679756] env[69982]: value = "task-3864052" [ 662.679756] env[69982]: _type = "Task" [ 662.679756] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.679756] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae937bf4-7326-4e8b-8c0e-83fd2bb86947 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.697456] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864052, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.714051] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Releasing lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.715027] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Instance network_info: |[{"id": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "address": "fa:16:3e:b7:54:77", "network": {"id": "cc0fd9bc-db9f-4bd5-a4cf-197e8f8c0253", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1842391413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43849a9ac5c349d29082fc270a8afd0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79537ce6-7e", "ovs_interfaceid": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 662.715168] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 
tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:54:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79537ce6-7e7e-4621-b2a8-e38c01d51f7d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.730325] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Creating folder: Project (43849a9ac5c349d29082fc270a8afd0f). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.730675] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17baf3c2-66d4-495c-97fe-b0ca66a0c8a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.746040] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Created folder: Project (43849a9ac5c349d29082fc270a8afd0f) in parent group-v767796. [ 662.746287] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Creating folder: Instances. Parent ref: group-v767821. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.746550] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0154f88-83ba-4b5f-8411-9fe137c38720 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.754644] env[69982]: DEBUG nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Received event network-changed-9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.754644] env[69982]: DEBUG nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Refreshing instance network info cache due to event network-changed-9e46d480-3573-461c-a8d3-0a5bc4944687. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 662.754644] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Acquiring lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.754644] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Acquired lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.754644] env[69982]: DEBUG nova.network.neutron [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Refreshing network info cache for port 9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 662.764955] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Created folder: Instances in parent group-v767821. [ 662.765302] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 662.765490] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.766042] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-389096c8-6723-4bb5-ad91-4b096b81d563 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.803520] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e968d3-4dab-cd0d-4838-083386eef443, 'name': SearchDatastore_Task, 'duration_secs': 0.012239} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.805849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.805849] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 662.805849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.805849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.806076] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.806195] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.806195] env[69982]: value = "task-3864055" [ 662.806195] env[69982]: _type = "Task" [ 662.806195] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.807847] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c0fce51-4680-40af-a4e1-df1aa53cf5af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.820031] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864055, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.823433] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.823433] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.824061] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee79e349-cc73-4f1c-8326-e2514768d03e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.833581] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 662.833581] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d683ff-31d8-ea14-acef-cd08d8a01d86" [ 662.833581] env[69982]: _type = "Task" [ 662.833581] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.844881] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d683ff-31d8-ea14-acef-cd08d8a01d86, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.857039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.488s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.859115] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 662.860454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.005s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.862314] env[69982]: INFO nova.compute.claims [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.940421] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "049c7405-3daf-4064-8770-efbbf15c832e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.940670] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.060515] env[69982]: DEBUG nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 663.100898] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.101122] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.191071] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864052, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.207684] env[69982]: INFO nova.compute.manager [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Took 20.77 seconds to build instance. [ 663.322966] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864055, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.350960] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d683ff-31d8-ea14-acef-cd08d8a01d86, 'name': SearchDatastore_Task, 'duration_secs': 0.045819} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.352117] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f62aaeea-381c-4224-b916-959a82c1245a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.366540] env[69982]: DEBUG nova.compute.utils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.368435] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 663.368435] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5242e7f9-1115-6e54-dfd7-c254d766461e" [ 663.368435] env[69982]: _type = "Task" [ 663.368435] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.372802] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.372802] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.387511] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5242e7f9-1115-6e54-dfd7-c254d766461e, 'name': SearchDatastore_Task, 'duration_secs': 0.011698} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.387932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.388358] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6de35617-22cf-4a32-8651-28ea67532b8f/6de35617-22cf-4a32-8651-28ea67532b8f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.390362] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf1e34f3-72f1-44d1-be0f-c1bfc69b7a30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.399909] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 663.399909] env[69982]: value = "task-3864060" [ 663.399909] env[69982]: _type = "Task" [ 663.399909] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.409962] env[69982]: DEBUG nova.network.neutron [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Updating instance_info_cache with network_info: [{"id": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "address": "fa:16:3e:08:e8:93", "network": {"id": "aeb8c5e0-37e2-4f42-a8c0-b7a2a588799d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-751477280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fbd8eef253884e57bb88beed7b50557a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap584c8c37-6f", "ovs_interfaceid": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.422292] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 
tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.507465] env[69982]: DEBUG nova.policy [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093054e7b12b4e13b1b90d9961e0e202', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fe14238478147f8bab643b39a1bcb34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.698406] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864052, 'name': ReconfigVM_Task, 'duration_secs': 0.779398} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.699165] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 743a4a52-ce35-4ec1-9286-e0c470e87186/743a4a52-ce35-4ec1-9286-e0c470e87186.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.699871] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d511177-204d-4205-b1ff-54c9db9bf1b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.711196] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 663.711196] env[69982]: value = "task-3864061" [ 663.711196] env[69982]: _type = "Task" [ 663.711196] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.711196] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ac76e8ff-d857-4ad3-83fa-233618ca4325 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.284s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.726119] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864061, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.823952] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864055, 'name': CreateVM_Task, 'duration_secs': 0.878508} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.824316] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.825028] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.825161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.826352] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.826656] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab19c246-74fb-4080-ba90-86cc239ce72a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.837917] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 663.837917] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52266a1c-67e2-de23-159c-4d8594b9c8a4" [ 663.837917] env[69982]: _type = "Task" [ 663.837917] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.849662] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52266a1c-67e2-de23-159c-4d8594b9c8a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.862493] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.862914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.863107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.863593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.863659] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.866704] env[69982]: INFO nova.compute.manager [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Terminating instance [ 663.872106] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 663.914777] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511829} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.915136] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6de35617-22cf-4a32-8651-28ea67532b8f/6de35617-22cf-4a32-8651-28ea67532b8f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.915426] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 663.915767] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-74fc04c9-5713-4a37-9afd-18e3006b29a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.923542] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Releasing lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.923891] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Instance network_info: |[{"id": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "address": "fa:16:3e:08:e8:93", "network": {"id": "aeb8c5e0-37e2-4f42-a8c0-b7a2a588799d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-751477280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "fbd8eef253884e57bb88beed7b50557a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap584c8c37-6f", "ovs_interfaceid": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.925786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:e8:93', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '584c8c37-6f38-42cb-853e-a6a8f1135e96', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.934183] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Creating folder: Project (fbd8eef253884e57bb88beed7b50557a). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.938551] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0efdb1fc-287f-4ed5-ae33-bb4e3b3b8578 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.944953] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 663.944953] env[69982]: value = "task-3864062" [ 663.944953] env[69982]: _type = "Task" [ 663.944953] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.955517] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Created folder: Project (fbd8eef253884e57bb88beed7b50557a) in parent group-v767796. [ 663.955791] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Creating folder: Instances. Parent ref: group-v767827. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 663.959470] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b50f0fb-cdc8-49e0-aec5-3a23e6004771 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.962897] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.974982] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Created folder: Instances in parent group-v767827. [ 663.974982] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.975227] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.975493] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35f82a1f-9e64-41eb-9910-929b2d67862f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.004017] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.004017] env[69982]: value = "task-3864065" [ 664.004017] env[69982]: _type = "Task" [ 664.004017] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.014610] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864065, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.227045] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.229863] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864061, 'name': Rename_Task, 'duration_secs': 0.33093} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.233046] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 664.234830] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98a2cc03-99b6-4b8c-b928-180f4f903aa5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.245061] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 664.245061] env[69982]: value = "task-3864066" [ 664.245061] env[69982]: _type = "Task" [ 664.245061] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.261707] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864066, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.270485] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a88e41-56c2-4b1f-807b-07e8306e71fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.278883] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834b7a77-8505-4915-976e-38f038fb18e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.289016] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.289213] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing instance network info cache due to event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 664.290466] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.290466] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.290466] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 664.325106] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Successfully created port: 066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.328353] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6347956c-c6d0-459b-b8b0-d50ceb3eb8e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.339247] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff02571-f87b-4f67-9e8e-5ccc37224407 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.372272] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 
tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52266a1c-67e2-de23-159c-4d8594b9c8a4, 'name': SearchDatastore_Task, 'duration_secs': 0.056506} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.373310] env[69982]: DEBUG nova.compute.provider_tree [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.374659] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.374876] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.375116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.375284] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.375509] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.375836] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8edb679-4c80-4ef6-9a31-196298542763 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.379072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "refresh_cache-3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.379334] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquired lock "refresh_cache-3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.379524] env[69982]: DEBUG nova.network.neutron [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 664.391242] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.391242] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.392599] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-586b12be-6213-4c95-af26-f7f3fdd7fbfd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.400995] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 664.400995] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b2ae0f-6358-279b-e496-c846019ceb17" [ 664.400995] env[69982]: _type = "Task" [ 664.400995] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.412695] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b2ae0f-6358-279b-e496-c846019ceb17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.459777] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.521103] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864065, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.755649] env[69982]: DEBUG nova.network.neutron [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Updated VIF entry in instance network info cache for port 9e46d480-3573-461c-a8d3-0a5bc4944687. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 664.755992] env[69982]: DEBUG nova.network.neutron [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Updating instance_info_cache with network_info: [{"id": "9e46d480-3573-461c-a8d3-0a5bc4944687", "address": "fa:16:3e:78:a4:59", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e46d480-35", "ovs_interfaceid": "9e46d480-3573-461c-a8d3-0a5bc4944687", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.763751] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864066, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.765141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.880534] env[69982]: DEBUG nova.scheduler.client.report [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.898372] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 664.917085] env[69982]: DEBUG nova.network.neutron [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 664.931170] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b2ae0f-6358-279b-e496-c846019ceb17, 'name': SearchDatastore_Task, 'duration_secs': 0.025796} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.932835] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e087dcc3-e446-4ace-8ffc-c62b49a70f1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.940020] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:05:16Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='275905323',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-427882332',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 664.940020] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.940020] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 664.940280] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.940280] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 664.940433] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 664.940757] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 664.941050] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 664.941468] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 664.943279] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 664.943279] env[69982]: DEBUG nova.virt.hardware [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 664.943279] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59888446-a725-46c7-b720-c0370c185709 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.948479] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 664.948479] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9b85c-b2d6-5184-21af-5776b81a6220" [ 664.948479] env[69982]: _type = "Task" [ 664.948479] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.960794] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca49c60-ddff-4f60-8dc6-3d4283b26bbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.972584] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.697479} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.972584] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.972880] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c597744e-bd19-4672-ac14-d11ce2865ace {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.990776] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9b85c-b2d6-5184-21af-5776b81a6220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.013702] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 6de35617-22cf-4a32-8651-28ea67532b8f/6de35617-22cf-4a32-8651-28ea67532b8f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.014111] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-733b698e-b5a5-48c8-b9d0-109b2e80e442 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.040034] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864065, 'name': CreateVM_Task, 'duration_secs': 0.708978} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.041263] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.041611] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 665.041611] env[69982]: value = "task-3864067" [ 665.041611] env[69982]: _type = "Task" [ 665.041611] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.042276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.042492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.042775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.043107] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e385bd7-bcd8-4286-9bc0-215dc22ae561 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.052024] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 665.052024] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b091d7-8931-bae7-d1d4-47f42f3ba95c" [ 665.052024] env[69982]: _type = "Task" [ 665.052024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.055837] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864067, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.067083] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b091d7-8931-bae7-d1d4-47f42f3ba95c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.083581] env[69982]: DEBUG nova.network.neutron [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.199560] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Successfully updated port: f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 665.259196] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Releasing lock "refresh_cache-5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.260232] env[69982]: DEBUG nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Received event network-vif-deleted-b089582f-8b97-4630-8a47-45009013aceb {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.260614] env[69982]: DEBUG nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Received event network-vif-plugged-79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.261086] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.261472] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.262118] env[69982]: DEBUG oslo_concurrency.lockutils [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.262813] env[69982]: DEBUG nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] No waiting events found dispatching network-vif-plugged-79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.263316] env[69982]: 
WARNING nova.compute.manager [req-25efd099-ced7-4462-8d51-38b5f8bf4672 req-111bc5db-dd77-48d1-b8ec-06d38c74cf29 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Received unexpected event network-vif-plugged-79537ce6-7e7e-4621-b2a8-e38c01d51f7d for instance with vm_state building and task_state spawning. [ 665.275370] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864066, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.391732] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.392641] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.398576] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.736s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.398628] env[69982]: DEBUG nova.objects.instance [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lazy-loading 'resources' on Instance uuid d0114728-9d44-4700-86a9-175e5f840b1d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 665.475721] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9b85c-b2d6-5184-21af-5776b81a6220, 'name': SearchDatastore_Task, 'duration_secs': 0.042376} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.476145] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.476531] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a8217447-bc22-4b84-925f-c3c09fb7228c/a8217447-bc22-4b84-925f-c3c09fb7228c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 665.476977] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f557150-a828-4711-8eb8-3d8504a7f612 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.495731] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 665.495731] env[69982]: value = "task-3864068" [ 665.495731] env[69982]: _type = "Task" [ 665.495731] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.509664] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.558461] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864067, 'name': ReconfigVM_Task, 'duration_secs': 0.349739} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.567584] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 6de35617-22cf-4a32-8651-28ea67532b8f/6de35617-22cf-4a32-8651-28ea67532b8f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.568653] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f5e3b2a-4d5e-4d76-82e2-5fb104d6b456 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.577608] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b091d7-8931-bae7-d1d4-47f42f3ba95c, 'name': SearchDatastore_Task, 'duration_secs': 0.015457} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.579345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.580016] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.580016] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.580016] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.580170] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.580446] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e 
tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 665.580446] env[69982]: value = "task-3864069" [ 665.580446] env[69982]: _type = "Task" [ 665.580446] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.580652] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59a2d353-319a-48b2-ad82-979da05406fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.589648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Releasing lock "refresh_cache-3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.590175] env[69982]: DEBUG nova.compute.manager [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 665.590430] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.595451] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b1eeead-c987-448d-a4f1-217ee5b3727e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.599272] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864069, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.600818] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.601140] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.602356] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa9f278f-48a5-46af-9013-aed1fe490eee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.608044] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.608763] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3f255bf-3f57-4b50-992a-ee3f766cdc6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.613009] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 665.613009] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e9614c-06ca-a103-f69c-dc94f20910e3" [ 665.613009] env[69982]: _type = "Task" [ 665.613009] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.619622] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 665.619622] env[69982]: value = "task-3864070" [ 665.619622] env[69982]: _type = "Task" [ 665.619622] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.631560] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e9614c-06ca-a103-f69c-dc94f20910e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.639771] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3864070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.685731] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updated VIF entry in instance network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 665.686148] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.704662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.706146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.706146] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 665.761731] env[69982]: DEBUG oslo_vmware.api [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864066, 'name': PowerOnVM_Task, 'duration_secs': 1.240579} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.762012] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 665.762290] env[69982]: INFO nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Took 13.24 seconds to spawn the instance on the hypervisor. [ 665.762487] env[69982]: DEBUG nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 665.763409] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e4a842-0105-4a6a-8be7-4147da5aedff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.904673] env[69982]: DEBUG nova.compute.utils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.918731] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.919015] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.964393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.964644] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.012964] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864068, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.083699] env[69982]: DEBUG nova.policy [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a66d97f6d8314b8d9c7bedd72ba4e00b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6735142244e9499083c4f9b74f187369', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 666.105434] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864069, 'name': Rename_Task, 'duration_secs': 0.267397} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.105666] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.105938] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-442e1497-1819-41ae-9d02-cea236695871 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.128378] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 666.128378] env[69982]: value = "task-3864071" [ 666.128378] env[69982]: _type = "Task" [ 666.128378] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.143307] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864071, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.148192] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3864070, 'name': PowerOffVM_Task, 'duration_secs': 0.242913} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.152269] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.152518] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.152836] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e9614c-06ca-a103-f69c-dc94f20910e3, 'name': SearchDatastore_Task, 'duration_secs': 0.025123} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.153066] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c090a6d4-7ad8-4ffe-b8e6-a64df046bc6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.158644] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4794ea4-d1d4-4e2e-94e3-af63b447f29e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.175370] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 666.175370] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52100c3c-f892-731e-bc4a-95e4dfd372d1" [ 666.175370] env[69982]: _type = "Task" [ 666.175370] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.190951] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.191346] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Received event network-vif-plugged-2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.191549] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquiring lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.191890] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.192186] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.193986] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] No waiting events found dispatching network-vif-plugged-2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 666.193986] env[69982]: WARNING nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 
req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Received unexpected event network-vif-plugged-2819d991-2637-403b-b564-70e27a846f61 for instance with vm_state building and task_state spawning. [ 666.193986] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Received event network-changed-2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.193986] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Refreshing instance network info cache due to event network-changed-2819d991-2637-403b-b564-70e27a846f61. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 666.193986] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquiring lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.194290] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquired lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.194290] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Refreshing network info cache for port 2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 666.199659] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52100c3c-f892-731e-bc4a-95e4dfd372d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.214432] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.214796] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.214888] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleting the datastore file [datastore1] 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.219278] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e8448bd-6a04-4194-8ebc-0801814bf931 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.233841] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for the task: (returnval){ [ 666.233841] env[69982]: value = "task-3864073" [ 666.233841] env[69982]: _type = "Task" [ 666.233841] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.246057] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3864073, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.285682] env[69982]: INFO nova.compute.manager [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Took 22.78 seconds to build instance. [ 666.331536] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Successfully updated port: 066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.335964] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 666.368133] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a90fb4d-e7e6-4945-affe-205d7bfc2dba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.385322] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302f58e6-5cd0-4521-98a6-bf233c454d2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.431641] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.438576] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73535862-37e1-486c-9df5-556958584111 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.450772] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b06c159-7a3a-4a89-a88a-daeb6bc17ada {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.479019] env[69982]: DEBUG nova.compute.provider_tree [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.517810] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.778162} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.517810] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a8217447-bc22-4b84-925f-c3c09fb7228c/a8217447-bc22-4b84-925f-c3c09fb7228c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 666.518152] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 666.518816] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15f9b957-3637-4522-bb7c-469b5e8132d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.533709] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 666.533709] env[69982]: value = "task-3864075" [ 666.533709] env[69982]: _type = "Task" [ 666.533709] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.544810] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864075, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.649168] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864071, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.667490] env[69982]: DEBUG nova.compute.manager [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Received event network-changed-79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 666.667737] env[69982]: DEBUG nova.compute.manager [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Refreshing instance network info cache due to event network-changed-79537ce6-7e7e-4621-b2a8-e38c01d51f7d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 666.667911] env[69982]: DEBUG oslo_concurrency.lockutils [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] Acquiring lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.668392] env[69982]: DEBUG oslo_concurrency.lockutils [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] Acquired lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.668392] env[69982]: DEBUG nova.network.neutron [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Refreshing network info cache for port 79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 666.691437] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52100c3c-f892-731e-bc4a-95e4dfd372d1, 'name': SearchDatastore_Task, 'duration_secs': 0.023415} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.691906] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.692317] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 303c7ee1-8d26-460b-aab9-d55c71cf8a73/303c7ee1-8d26-460b-aab9-d55c71cf8a73.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.693742] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7b258904-d246-4be0-bb2f-bbadd8460bdb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.705892] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 666.705892] env[69982]: value = "task-3864076" [ 666.705892] env[69982]: _type = "Task" [ 666.705892] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.724022] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864076, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.751234] env[69982]: DEBUG oslo_vmware.api [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Task: {'id': task-3864073, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.353379} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.751234] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.751234] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.751234] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.751234] env[69982]: INFO nova.compute.manager [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 666.751664] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.752540] env[69982]: DEBUG nova.compute.manager [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 666.752540] env[69982]: DEBUG nova.network.neutron [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 666.794764] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c8c0c65c-818d-4b90-9023-93b99fa9199a tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.304s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.838764] env[69982]: DEBUG nova.network.neutron [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 666.838764] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.838764] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.838764] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 666.887124] env[69982]: DEBUG nova.network.neutron [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Updating instance_info_cache with network_info: [{"id": "f542e5e0-ebab-4967-94f1-8625e905193e", "address": "fa:16:3e:58:1c:6c", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf542e5e0-eb", "ovs_interfaceid": "f542e5e0-ebab-4967-94f1-8625e905193e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.981644] env[69982]: DEBUG nova.scheduler.client.report [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.049231] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864075, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113555} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.049775] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 667.051010] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b47cd3e-96ad-4471-99b5-e967532dc56e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.082852] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] a8217447-bc22-4b84-925f-c3c09fb7228c/a8217447-bc22-4b84-925f-c3c09fb7228c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 667.083390] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba9d4a21-3e88-453e-8f8f-cc9c3d92eba8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.113054] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 667.113054] env[69982]: value = "task-3864077" [ 667.113054] env[69982]: _type = "Task" [ 667.113054] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.127448] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.141713] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "589419ea-c609-45bb-bde5-3b22d9ff111e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.142701] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.151321] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864071, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.218711] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864076, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.303023] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 667.327216] env[69982]: DEBUG nova.network.neutron [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.391738] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.391992] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Instance network_info: |[{"id": "f542e5e0-ebab-4967-94f1-8625e905193e", "address": "fa:16:3e:58:1c:6c", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf542e5e0-eb", "ovs_interfaceid": "f542e5e0-ebab-4967-94f1-8625e905193e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 667.393383] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:1c:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f542e5e0-ebab-4967-94f1-8625e905193e', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.404523] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.405246] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.405858] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-438d6dd0-3f4e-42b4-9123-69b2f294063c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.435897] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.435897] env[69982]: value = "task-3864078" [ 667.435897] env[69982]: _type = "Task" [ 667.435897] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.447451] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.449840] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864078, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.462813] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 667.485106] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.485395] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.485555] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.485737] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.485886] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.486231] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.486428] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.486592] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.486932] 
env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.486994] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.487209] env[69982]: DEBUG nova.virt.hardware [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.488508] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2ff4f1-12e3-4ae7-a549-27c48a36ff61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.494823] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.498266] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.723s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.500364] env[69982]: INFO nova.compute.claims [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 667.512972] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6aba0a-f414-4472-93d7-da115a5f1739 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.541261] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updated VIF entry in instance network info cache for port 2819d991-2637-403b-b564-70e27a846f61. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 667.541791] env[69982]: DEBUG nova.network.neutron [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updating instance_info_cache with network_info: [{"id": "2819d991-2637-403b-b564-70e27a846f61", "address": "fa:16:3e:dd:be:99", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2819d991-26", "ovs_interfaceid": "2819d991-2637-403b-b564-70e27a846f61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.551207] env[69982]: INFO nova.scheduler.client.report [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Deleted allocations for instance d0114728-9d44-4700-86a9-175e5f840b1d [ 667.628280] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.646403] env[69982]: DEBUG oslo_vmware.api [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864071, 'name': PowerOnVM_Task, 'duration_secs': 1.102314} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.652293] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.652293] env[69982]: INFO nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Took 12.80 seconds to spawn the instance on the hypervisor. 
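[annotation] The PowerOnVM_Task, ReconfigVM_Task and ExtendVirtualDisk_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter task through the shared VMwareAPISession and then blocks in wait_for_task(), which is what produces the recurring "progress is N%" _poll_task lines and the final 'duration_secs' result. A minimal standalone sketch of that pattern, assuming placeholder connection details and a placeholder VM reference (none of these values come from this log):

    # Illustrative sketch only; host, credentials and vm_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.test',         # vCenter host (placeholder)
        'administrator@example',   # username (placeholder)
        'secret',                  # password (placeholder)
        api_retry_count=10,        # retry count for transient API faults
        task_poll_interval=0.5)    # how often task progress is polled/logged

    def power_on(vm_ref):
        """Start a VM and block until the vCenter task completes.

        invoke_api() issues the SOAP call (the 'Invoking
        VirtualMachine.PowerOnVM_Task' style lines above), and
        wait_for_task() polls the returned task object, emitting the
        'progress is N%' lines until it succeeds or raises on error.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

[end annotation]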
[ 667.652293] env[69982]: DEBUG nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.652293] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6900cee-9dcb-414e-9f94-a8dd48c673c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.721706] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864076, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.952263} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.722097] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 303c7ee1-8d26-460b-aab9-d55c71cf8a73/303c7ee1-8d26-460b-aab9-d55c71cf8a73.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.722351] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.722639] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b37dcc7f-018a-4b56-abcc-f464fd5a046c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.732886] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 667.732886] env[69982]: value = "task-3864079" [ 667.732886] env[69982]: _type = "Task" [ 667.732886] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.745119] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864079, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.748729] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Successfully created port: c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 667.830223] env[69982]: INFO nova.compute.manager [-] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Took 1.08 seconds to deallocate network for instance. [ 667.838015] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.856238] env[69982]: DEBUG nova.network.neutron [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updating instance_info_cache with network_info: [{"id": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "address": "fa:16:3e:e9:6e:ee", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap066271e7-f0", "ovs_interfaceid": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.951703] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864078, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.988317] env[69982]: DEBUG nova.network.neutron [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updated VIF entry in instance network info cache for port 79537ce6-7e7e-4621-b2a8-e38c01d51f7d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 667.988879] env[69982]: DEBUG nova.network.neutron [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating instance_info_cache with network_info: [{"id": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "address": "fa:16:3e:b7:54:77", "network": {"id": "cc0fd9bc-db9f-4bd5-a4cf-197e8f8c0253", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1842391413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43849a9ac5c349d29082fc270a8afd0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79537ce6-7e", "ovs_interfaceid": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.047908] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Releasing lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.047908] env[69982]: DEBUG nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Received event network-vif-plugged-584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 668.047908] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Acquiring lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.047908] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.048112] env[69982]: DEBUG oslo_concurrency.lockutils [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.048288] env[69982]: DEBUG 
nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] No waiting events found dispatching network-vif-plugged-584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 668.048417] env[69982]: WARNING nova.compute.manager [req-ac44dfb9-d66e-46c0-bb61-15ea7905b422 req-5d006254-d263-45dc-a883-fb8626b49bb7 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Received unexpected event network-vif-plugged-584c8c37-6f38-42cb-853e-a6a8f1135e96 for instance with vm_state building and task_state spawning. [ 668.060253] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43dc1f91-f55e-40b6-ac2d-a0b9b96a56df tempest-DeleteServersAdminTestJSON-885181829 tempest-DeleteServersAdminTestJSON-885181829-project-admin] Lock "d0114728-9d44-4700-86a9-175e5f840b1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.447s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.134592] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864077, 'name': ReconfigVM_Task, 'duration_secs': 0.90675} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.134886] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfigured VM instance instance-00000009 to attach disk [datastore2] a8217447-bc22-4b84-925f-c3c09fb7228c/a8217447-bc22-4b84-925f-c3c09fb7228c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 668.135578] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ac4683a-ee3c-4638-9f3c-a73dfdfe649e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.150679] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Received event network-changed-584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 668.150906] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Refreshing instance network info cache due to event network-changed-584c8c37-6f38-42cb-853e-a6a8f1135e96. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 668.151283] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquiring lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.151283] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquired lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.151443] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Refreshing network info cache for port 584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 668.154780] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 668.154780] env[69982]: value = "task-3864080" [ 668.154780] env[69982]: _type = "Task" [ 668.154780] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.180249] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864080, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.182818] env[69982]: INFO nova.compute.manager [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Took 17.72 seconds to build instance. [ 668.244412] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864079, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171766} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.244412] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.245139] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d1d050-d1d7-4277-afdc-9adc94783378 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.271312] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 303c7ee1-8d26-460b-aab9-d55c71cf8a73/303c7ee1-8d26-460b-aab9-d55c71cf8a73.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.271593] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00832c27-e082-4857-9e79-c361c143059b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.296553] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 668.296553] env[69982]: value = "task-3864081" [ 668.296553] env[69982]: _type = "Task" [ 668.296553] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.314474] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864081, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.341326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.358581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.358953] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Instance network_info: |[{"id": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "address": "fa:16:3e:e9:6e:ee", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap066271e7-f0", "ovs_interfaceid": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 668.359417] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:6e:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad72c645-a67d-4efd-b563-28e44077e68d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '066271e7-f03a-48d7-a4a4-df17ef2b24f4', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.372415] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Creating folder: Project (4fe14238478147f8bab643b39a1bcb34). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.375847] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f071361-255e-4f24-86a7-812e169ad38d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.388478] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Created folder: Project (4fe14238478147f8bab643b39a1bcb34) in parent group-v767796. [ 668.388723] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Creating folder: Instances. Parent ref: group-v767831. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.389211] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a178dfff-9e62-485e-b47b-e996ea856bca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.402884] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Created folder: Instances in parent group-v767831. [ 668.402884] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.402884] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.402884] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7170f9d2-7856-43ab-a232-e79414bdb179 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.430506] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.430506] env[69982]: value = "task-3864084" [ 668.430506] env[69982]: _type = "Task" [ 668.430506] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.448704] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864084, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.465665] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864078, 'name': CreateVM_Task, 'duration_secs': 0.683589} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.465883] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.466639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.466873] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.467277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.467557] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87d00289-0aac-4a6a-bde9-52c69dd9f72e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.474495] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 668.474495] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e4e5c-fa7c-2cd5-d809-43a0b03cf894" [ 668.474495] env[69982]: _type = "Task" [ 668.474495] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.490445] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e4e5c-fa7c-2cd5-d809-43a0b03cf894, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.491774] env[69982]: DEBUG oslo_concurrency.lockutils [req-62172f5f-89ca-4652-9bf6-4c3c49db44c3 req-8433933f-5fb7-419d-82c5-f34cde15f093 service nova] Releasing lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.537279] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.537581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.537788] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.537973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.538195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.541623] env[69982]: INFO nova.compute.manager [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Terminating instance [ 668.668565] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864080, 'name': Rename_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.685791] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4eac296-77f5-420a-a423-d78bd993af0e tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 19.240s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.814448] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.904064] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.904315] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.904526] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.904705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.904873] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.907141] env[69982]: INFO nova.compute.manager [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Terminating instance [ 668.934807] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae5e547-ce51-48e3-bd44-17b1ad187645 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.955839] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864084, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.957256] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c705738-6f76-4bda-8ba5-02dd22448bcd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.001057] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9baac32-7046-4bd1-a20f-32a9ba62a232 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.011937] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e4e5c-fa7c-2cd5-d809-43a0b03cf894, 'name': SearchDatastore_Task, 'duration_secs': 0.02567} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.015454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.015740] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.016439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.016439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.016439] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
669.016732] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02bc73c5-079f-4415-b419-d470665f2432 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.020667] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0c1740-1537-4d09-b668-a849e295ce0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.039520] env[69982]: DEBUG nova.compute.provider_tree [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.047101] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.048044] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.049046] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbec064d-e9d6-47fe-a4f8-b233a9ce7130 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.053270] env[69982]: DEBUG nova.compute.manager [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 669.053731] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 669.055376] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ce0ebe-e4c1-4e01-9a17-acc1b8605105 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.063853] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 669.063853] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b13a0d-ed8e-4b41-f64f-f0755b1438e8" [ 669.063853] env[69982]: _type = "Task" [ 669.063853] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.070768] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.071709] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-108b194d-30a1-4e56-a4ea-43a03398221b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.077404] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b13a0d-ed8e-4b41-f64f-f0755b1438e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.090909] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 669.090909] env[69982]: value = "task-3864086" [ 669.090909] env[69982]: _type = "Task" [ 669.090909] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.103179] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864086, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.168499] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864080, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.193953] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.226075] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Updated VIF entry in instance network info cache for port 584c8c37-6f38-42cb-853e-a6a8f1135e96. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 669.228310] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Updating instance_info_cache with network_info: [{"id": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "address": "fa:16:3e:08:e8:93", "network": {"id": "aeb8c5e0-37e2-4f42-a8c0-b7a2a588799d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-751477280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbd8eef253884e57bb88beed7b50557a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap584c8c37-6f", "ovs_interfaceid": "584c8c37-6f38-42cb-853e-a6a8f1135e96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.323957] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864081, 'name': ReconfigVM_Task, 'duration_secs': 0.807938} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.328469] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 303c7ee1-8d26-460b-aab9-d55c71cf8a73/303c7ee1-8d26-460b-aab9-d55c71cf8a73.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.330776] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d7fd0e4-c205-4e5a-971b-3ca4ba75a992 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.339716] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 669.339716] env[69982]: value = "task-3864087" [ 669.339716] env[69982]: _type = "Task" [ 669.339716] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.357255] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864087, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.416867] env[69982]: DEBUG nova.compute.manager [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 669.417129] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 669.418062] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae76262-88db-4149-8673-360ba31de128 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.432357] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 669.432684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38d1eba7-3a24-4fe2-9d3b-851e71a25616 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.444529] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864084, 'name': CreateVM_Task, 'duration_secs': 0.697052} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.446058] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.447463] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 669.447463] env[69982]: value = "task-3864088" [ 669.447463] env[69982]: _type = "Task" [ 669.447463] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.447463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.447858] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.447858] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.448960] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-588617b2-c93f-445e-a401-d9cb19aedfcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.465507] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864088, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.465866] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 669.465866] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52629195-984e-e437-d9af-cac6964cbf42" [ 669.465866] env[69982]: _type = "Task" [ 669.465866] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.476131] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52629195-984e-e437-d9af-cac6964cbf42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.551492] env[69982]: DEBUG nova.scheduler.client.report [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.582553] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b13a0d-ed8e-4b41-f64f-f0755b1438e8, 'name': SearchDatastore_Task, 'duration_secs': 0.014607} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.585462] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b33d36-85ee-491f-832a-7563213cd550 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.610686] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 669.610686] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52915928-7601-c37b-7565-486c697decef" [ 669.610686] env[69982]: _type = "Task" [ 669.610686] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.611552] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864086, 'name': PowerOffVM_Task, 'duration_secs': 0.265074} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.611552] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.611679] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.619050] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-925f9f31-8878-40c1-b497-278d404d6992 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.628191] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52915928-7601-c37b-7565-486c697decef, 'name': SearchDatastore_Task, 'duration_secs': 0.013375} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.628479] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.628787] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] bba73604-c54f-4643-9e4c-326b643b3d51/bba73604-c54f-4643-9e4c-326b643b3d51.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.629199] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31a4f4dc-0665-48df-bf62-9248bb4e52be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.637377] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 669.637377] env[69982]: value = "task-3864090" [ 669.637377] env[69982]: _type = "Task" [ 669.637377] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.647729] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.669415] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864080, 'name': Rename_Task, 'duration_secs': 1.044087} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.669533] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.669805] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3df9d68a-0623-4462-9755-27899119f2b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.677526] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 669.677526] env[69982]: value = "task-3864091" [ 669.677526] env[69982]: _type = "Task" [ 669.677526] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.687130] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864091, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.703223] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 669.703963] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 669.703963] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore2] 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 669.706329] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01737f85-3a81-4d81-b2e3-1cc9f61c4220 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.714883] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 669.714883] env[69982]: value = "task-3864092" [ 669.714883] env[69982]: _type = "Task" [ 669.714883] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.726304] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864092, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.727410] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.731348] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Releasing lock "refresh_cache-303c7ee1-8d26-460b-aab9-d55c71cf8a73" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.731348] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Received event network-vif-plugged-f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.731490] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquiring lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.731733] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.731942] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.732144] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] No waiting events found dispatching network-vif-plugged-f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 669.732317] env[69982]: WARNING nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Received unexpected event network-vif-plugged-f542e5e0-ebab-4967-94f1-8625e905193e for instance with vm_state building and task_state spawning. 
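The entries above and below all follow the same oslo.vmware call-and-poll pattern: a vSphere task method is invoked (CreateVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task, ...), the returned task reference is handed to wait_for_task, and _poll_task logs progress until the task completes. The Python sketch below only illustrates that pattern; the vCenter host, credentials and managed-object ID are placeholders rather than values from this log, and the code is not the Nova vm_util implementation itself.

# Illustrative sketch of the oslo.vmware invoke/wait pattern seen in the
# surrounding entries; host, credentials and the moref ID are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',           # placeholder vCenter host
    'administrator@vsphere.local',    # placeholder user
    'secret',                         # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)           # interval behind the "_poll_task" entries

# Placeholder managed-object reference for a VM (normally looked up via the
# PropertyCollector, as in the RetrievePropertiesEx calls above).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Invoking the SOAP task method returns a task reference...
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ...and wait_for_task() polls its TaskInfo until success or error, which is
# what produces the "Waiting for the task" / "progress is N%" lines.
task_info = session.wait_for_task(task)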
[ 669.732474] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Received event network-changed-f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.732694] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Refreshing instance network info cache due to event network-changed-f542e5e0-ebab-4967-94f1-8625e905193e. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.732914] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquiring lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.733088] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquired lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.733248] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Refreshing network info cache for port f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 669.854655] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864087, 'name': Rename_Task, 'duration_secs': 0.234696} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.854655] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.854655] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e8a9cf2-3bd8-4207-9e8f-1db7ffa3dd54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.864342] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 669.864342] env[69982]: value = "task-3864093" [ 669.864342] env[69982]: _type = "Task" [ 669.864342] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.874667] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864093, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.932398] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Successfully updated port: c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 669.961483] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864088, 'name': PowerOffVM_Task, 'duration_secs': 0.25034} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.962968] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 669.963229] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 669.963526] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7dea3474-dfd0-409b-94c4-140334dd1233 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.980381] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52629195-984e-e437-d9af-cac6964cbf42, 'name': SearchDatastore_Task, 'duration_secs': 0.017427} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.980737] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.980988] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.981306] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.981480] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.982040] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.982040] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff426062-ad65-4b5e-967a-d098c3d55d16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.997853] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.998085] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.001225] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43fc6235-0d15-4bfe-aeb1-907647888d55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.006800] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 670.006800] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52564da4-0303-bd38-4315-3c27ad300c79" [ 670.006800] env[69982]: _type = "Task" [ 670.006800] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.020646] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52564da4-0303-bd38-4315-3c27ad300c79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.049224] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 670.049494] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 670.050060] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Deleting the datastore file [datastore2] e82ae1bd-c31b-44ca-9608-9348b8eac8dc {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 670.051074] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a4ae875-7733-4bc5-9056-36705d472c76 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.059303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.059987] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 670.065529] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.090s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.067171] env[69982]: INFO nova.compute.claims [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.070149] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for the task: (returnval){ [ 670.070149] env[69982]: value = "task-3864095" [ 670.070149] env[69982]: _type = "Task" [ 670.070149] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.080064] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "9d1b0a5f-e096-4641-a077-f0949135efbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.080064] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.085406] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.151454] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864090, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.194798] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864091, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.232782] env[69982]: DEBUG oslo_vmware.api [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864092, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220774} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.232782] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.232934] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.233117] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.233273] env[69982]: INFO nova.compute.manager [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Took 1.18 seconds to destroy the instance on the hypervisor. [ 670.233560] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.233779] env[69982]: DEBUG nova.compute.manager [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.233889] env[69982]: DEBUG nova.network.neutron [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 670.364789] env[69982]: DEBUG nova.compute.manager [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Received event network-vif-plugged-066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.364789] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Acquiring lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.364964] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.365968] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.365968] env[69982]: DEBUG nova.compute.manager [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] No waiting events found dispatching network-vif-plugged-066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 670.365968] env[69982]: WARNING nova.compute.manager [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Received unexpected event network-vif-plugged-066271e7-f03a-48d7-a4a4-df17ef2b24f4 for instance with vm_state building and task_state spawning. [ 670.365968] env[69982]: DEBUG nova.compute.manager [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Received event network-changed-066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.365968] env[69982]: DEBUG nova.compute.manager [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Refreshing instance network info cache due to event network-changed-066271e7-f03a-48d7-a4a4-df17ef2b24f4. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 670.366546] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Acquiring lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.366546] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Acquired lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.366546] env[69982]: DEBUG nova.network.neutron [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Refreshing network info cache for port 066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 670.384832] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864093, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.436303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.436463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquired lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.436622] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 670.523558] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52564da4-0303-bd38-4315-3c27ad300c79, 'name': SearchDatastore_Task, 'duration_secs': 0.054783} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.524600] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5ccbc72-b8b3-4fa5-8ecf-86413ad4b804 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.531609] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 670.531609] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524dd820-37c2-dcb1-3a67-8288fd3ad159" [ 670.531609] env[69982]: _type = "Task" [ 670.531609] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.545641] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524dd820-37c2-dcb1-3a67-8288fd3ad159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.574921] env[69982]: DEBUG nova.compute.utils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 670.575492] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 670.575749] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 670.596177] env[69982]: DEBUG oslo_vmware.api [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Task: {'id': task-3864095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360069} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.597372] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 670.597648] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 670.598481] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 670.598481] env[69982]: INFO nova.compute.manager [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Took 1.18 seconds to destroy the instance on the hypervisor. [ 670.598481] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.599799] env[69982]: DEBUG nova.compute.manager [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 670.599799] env[69982]: DEBUG nova.network.neutron [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 670.656202] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.655758} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.656575] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] bba73604-c54f-4643-9e4c-326b643b3d51/bba73604-c54f-4643-9e4c-326b643b3d51.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 670.657218] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 670.657218] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c543f976-d096-4d4f-80a7-308522d9ef74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.670519] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 670.670519] env[69982]: value = "task-3864097" [ 670.670519] env[69982]: _type = "Task" [ 670.670519] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.681940] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.693852] env[69982]: DEBUG oslo_vmware.api [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864091, 'name': PowerOnVM_Task, 'duration_secs': 0.579581} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.693852] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.694202] env[69982]: INFO nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Took 13.34 seconds to spawn the instance on the hypervisor. 
[ 670.694652] env[69982]: DEBUG nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.695638] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a259e1-bca5-4334-9231-2662f5fd0f8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.857118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.857118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.882351] env[69982]: DEBUG oslo_vmware.api [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864093, 'name': PowerOnVM_Task, 'duration_secs': 0.860231} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.882760] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.883096] env[69982]: INFO nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 10.96 seconds to spawn the instance on the hypervisor. 
[ 670.883568] env[69982]: DEBUG nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.884464] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0e29cb-4a41-4ce2-b3a8-0c61da70c7f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.900732] env[69982]: DEBUG nova.policy [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11ce0ddb8cf445f2ab641704393ebe36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e788d70221a4c9c834bd785b689b7f6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 671.028967] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Updated VIF entry in instance network info cache for port f542e5e0-ebab-4967-94f1-8625e905193e. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 671.028967] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Updating instance_info_cache with network_info: [{"id": "f542e5e0-ebab-4967-94f1-8625e905193e", "address": "fa:16:3e:58:1c:6c", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf542e5e0-eb", "ovs_interfaceid": "f542e5e0-ebab-4967-94f1-8625e905193e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.053215] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524dd820-37c2-dcb1-3a67-8288fd3ad159, 'name': SearchDatastore_Task, 'duration_secs': 0.014394} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.054576] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 671.058418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.058786] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 930c8740-5ad1-4491-8dd6-1a568eaa6f62/930c8740-5ad1-4491-8dd6-1a568eaa6f62.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.059920] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f455cbe3-5ce4-4d7e-bcee-8d442389afb7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.070387] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 671.070387] env[69982]: value = "task-3864098" [ 671.070387] env[69982]: _type = "Task" [ 671.070387] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.082444] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864098, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.085223] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 671.186435] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085105} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.186744] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 671.187847] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5df574-d37c-47f7-9c96-2cd8fcfd6a72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.216419] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] bba73604-c54f-4643-9e4c-326b643b3d51/bba73604-c54f-4643-9e4c-326b643b3d51.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 671.227279] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d885df99-9eef-48b8-af45-860f3401a490 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.249117] env[69982]: INFO nova.compute.manager [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Took 19.27 seconds to build instance. [ 671.254593] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 671.254593] env[69982]: value = "task-3864099" [ 671.254593] env[69982]: _type = "Task" [ 671.254593] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.268113] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.406881] env[69982]: INFO nova.compute.manager [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 17.74 seconds to build instance. 
[ 671.531921] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Releasing lock "refresh_cache-bba73604-c54f-4643-9e4c-326b643b3d51" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.532778] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Received event network-changed-dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.532905] env[69982]: DEBUG nova.compute.manager [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Refreshing instance network info cache due to event network-changed-dd67d1de-a902-443e-a6bb-1cc4903f5ccd. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.533999] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquiring lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.533999] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Acquired lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.533999] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Refreshing network info cache for port dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 671.563805] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f42ba2-44d5-4f31-b7f2-f94d806b988b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.580460] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535156d2-39f7-470f-b683-61760747f42b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.595879] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864098, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.628771] env[69982]: DEBUG nova.network.neutron [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Updating instance_info_cache with network_info: [{"id": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "address": "fa:16:3e:ab:2e:39", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d696dd-d8", "ovs_interfaceid": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.630785] env[69982]: DEBUG nova.network.neutron [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updated VIF entry in instance network info cache for port 066271e7-f03a-48d7-a4a4-df17ef2b24f4. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 671.631157] env[69982]: DEBUG nova.network.neutron [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updating instance_info_cache with network_info: [{"id": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "address": "fa:16:3e:e9:6e:ee", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap066271e7-f0", "ovs_interfaceid": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.633276] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd0d5d-5c3d-4c9c-a4bc-fd10b7aeb8a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.644812] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f20f277-ae25-4db7-9bf4-5daeeeae4a33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.662994] env[69982]: DEBUG nova.compute.provider_tree [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.751696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79203e47-ab9e-4750-be33-482658c9a0e6 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.781s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.768882] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864099, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.822529] env[69982]: DEBUG nova.network.neutron [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.910540] env[69982]: DEBUG oslo_concurrency.lockutils [None req-55dc4b8b-064d-4537-92e4-3fb0b7dd58b6 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.249s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.013080] env[69982]: DEBUG nova.compute.manager [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Received event network-vif-plugged-c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.013663] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Acquiring lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.014534] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.015119] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.015360] env[69982]: DEBUG nova.compute.manager [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] No waiting events found dispatching network-vif-plugged-c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 672.015543] env[69982]: WARNING nova.compute.manager [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Received unexpected event network-vif-plugged-c5d696dd-d8ff-4fcd-9357-3c1510432508 for instance with vm_state building and task_state spawning. 
[ 672.015702] env[69982]: DEBUG nova.compute.manager [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Received event network-changed-c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.015858] env[69982]: DEBUG nova.compute.manager [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Refreshing instance network info cache due to event network-changed-c5d696dd-d8ff-4fcd-9357-3c1510432508. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 672.016044] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Acquiring lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.089968] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864098, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.097383] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 672.129300] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 672.129300] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.129300] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 672.129631] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.129631] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 672.129631] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 672.129631] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 672.129631] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 672.129802] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 672.129802] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 672.129802] env[69982]: DEBUG nova.virt.hardware [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 672.130306] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079cfea6-5d2a-48bf-9ece-eb680e00d9c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.138214] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Releasing lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.138528] env[69982]: DEBUG nova.compute.manager [None 
req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Instance network_info: |[{"id": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "address": "fa:16:3e:ab:2e:39", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d696dd-d8", "ovs_interfaceid": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.138977] env[69982]: DEBUG oslo_concurrency.lockutils [req-0952633a-a705-489f-a2a1-060aa80fc2b4 req-866e1ac1-e162-4ecb-8749-9cca252bd0f7 service nova] Releasing lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.139634] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Acquired lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.139843] env[69982]: DEBUG nova.network.neutron [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Refreshing network info cache for port c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 672.141356] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:2e:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5d696dd-d8ff-4fcd-9357-3c1510432508', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.149795] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Creating folder: Project (6735142244e9499083c4f9b74f187369). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.152383] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9a48d2-0894-4ddc-a4c8-465c045ab30a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.157124] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64ad61d3-5a56-4c41-a27a-593316e7d1bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.172757] env[69982]: DEBUG nova.scheduler.client.report [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 672.179567] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Created folder: Project (6735142244e9499083c4f9b74f187369) in parent group-v767796. [ 672.179874] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Creating folder: Instances. Parent ref: group-v767834. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.180148] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f1ecc1f-7709-43d7-917c-7207c3df4d05 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.255459] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.268701] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864099, 'name': ReconfigVM_Task, 'duration_secs': 0.744779} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.269562] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Reconfigured VM instance instance-0000000b to attach disk [datastore1] bba73604-c54f-4643-9e4c-326b643b3d51/bba73604-c54f-4643-9e4c-326b643b3d51.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 672.271328] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e7df8be-93c6-484d-bb94-10b02f0cf748 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.273515] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Created folder: Instances in parent group-v767834. [ 672.273875] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.274195] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.275123] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57354128-514c-4cff-8ce4-23587b0d66a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.293816] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 672.293816] env[69982]: value = "task-3864102" [ 672.293816] env[69982]: _type = "Task" [ 672.293816] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.300009] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.300009] env[69982]: value = "task-3864103" [ 672.300009] env[69982]: _type = "Task" [ 672.300009] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.308473] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864102, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.317582] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864103, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.325235] env[69982]: INFO nova.compute.manager [-] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Took 2.09 seconds to deallocate network for instance. [ 672.414767] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 672.457631] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updated VIF entry in instance network info cache for port dd67d1de-a902-443e-a6bb-1cc4903f5ccd. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 672.458170] env[69982]: DEBUG nova.network.neutron [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updating instance_info_cache with network_info: [{"id": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "address": "fa:16:3e:10:0c:44", "network": {"id": "b8822d4c-5a5e-4410-a83d-8794b8e4f11e", "bridge": "br-int", "label": "tempest-ServersTestJSON-343969811-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c90023e59e624e6aa7d3d2363050619f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d62c1cf-f39a-4626-9552-f1e13c692636", "external-id": "nsx-vlan-transportzone-748", "segmentation_id": 748, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd67d1de-a9", "ovs_interfaceid": "dd67d1de-a902-443e-a6bb-1cc4903f5ccd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.596417] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864098, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.117285} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.596879] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 930c8740-5ad1-4491-8dd6-1a568eaa6f62/930c8740-5ad1-4491-8dd6-1a568eaa6f62.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.597386] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.597930] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63396eb5-4d45-4b14-bb62-802667e25024 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.607106] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 672.607106] env[69982]: value = "task-3864104" [ 672.607106] env[69982]: _type = "Task" [ 672.607106] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.619693] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864104, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.679027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.680485] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 672.682057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.129s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.787457] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.807485] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864102, 'name': Rename_Task, 'duration_secs': 0.247588} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.808397] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 672.808678] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc214995-770e-4ad9-9932-928e4a449c37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.813740] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864103, 'name': CreateVM_Task, 'duration_secs': 0.467517} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.817068] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 672.818469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.818469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.818868] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 672.819529] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe382eb8-2e9e-40b1-b3d7-c25cf32dd6ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.823492] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 672.823492] env[69982]: value = "task-3864106" [ 672.823492] env[69982]: _type = "Task" [ 672.823492] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.831563] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 672.831563] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232899c-46a4-31df-623c-981306749950" [ 672.831563] env[69982]: _type = "Task" [ 672.831563] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.835338] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.836169] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864106, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.844859] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232899c-46a4-31df-623c-981306749950, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.933462] env[69982]: DEBUG nova.network.neutron [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.947207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.961921] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf896c9-2970-4f01-843f-765b548b07af req-faebfff7-12ea-45cd-8113-d8cb0a103619 service nova] Releasing lock "refresh_cache-1bdb1577-cc35-4839-8992-ae3b4ab87eb2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.120682] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078021} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.121195] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.122309] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c57043-3208-40ba-ad6e-50a3ef6af20f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.153733] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 930c8740-5ad1-4491-8dd6-1a568eaa6f62/930c8740-5ad1-4491-8dd6-1a568eaa6f62.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.155415] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Successfully created port: 8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.159715] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-174665de-796a-44be-a319-3f73eda9ff71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.186501] env[69982]: DEBUG nova.compute.utils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 673.188689] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 673.188689] env[69982]: value = "task-3864107" [ 673.188689] env[69982]: _type = "Task" [ 673.188689] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.189478] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 673.189707] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 673.197153] env[69982]: INFO nova.compute.claims [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.216548] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.336854] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864106, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.348537] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232899c-46a4-31df-623c-981306749950, 'name': SearchDatastore_Task, 'duration_secs': 0.020082} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.349082] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.349385] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.349703] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.349953] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.350233] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.350681] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ed60bf5-283e-48b3-8fad-e7fcec88315c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.359972] env[69982]: DEBUG nova.policy [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9a3b4f9b694316851d3ea26f8bca6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '344ff7edee7c427bbbdb29e1a8c91a46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 673.363131] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.363431] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.365374] env[69982]: DEBUG nova.network.neutron [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Updated VIF entry in instance network info cache for port c5d696dd-d8ff-4fcd-9357-3c1510432508. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 673.366194] env[69982]: DEBUG nova.network.neutron [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Updating instance_info_cache with network_info: [{"id": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "address": "fa:16:3e:ab:2e:39", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d696dd-d8", "ovs_interfaceid": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.367561] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c1c04e-dba2-4206-b087-26187c322d34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.375149] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 673.375149] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527f0d49-f59d-5810-9ed9-d79111493b34" [ 673.375149] env[69982]: _type = "Task" [ 673.375149] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.391045] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527f0d49-f59d-5810-9ed9-d79111493b34, 'name': SearchDatastore_Task, 'duration_secs': 0.011591} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.395017] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a1de33-1594-43fb-bb70-e1c220095fa5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.399433] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 673.399433] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d0723e-39f0-55e7-3b51-47c63d3385b2" [ 673.399433] env[69982]: _type = "Task" [ 673.399433] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.414974] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d0723e-39f0-55e7-3b51-47c63d3385b2, 'name': SearchDatastore_Task, 'duration_secs': 0.011691} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.415649] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.415649] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5c9b6dc6-887e-477a-b902-135fe06cfbbd/5c9b6dc6-887e-477a-b902-135fe06cfbbd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 673.416060] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eef84031-6706-433f-ac6e-fef4cadf9a69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.425370] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 673.425370] env[69982]: value = "task-3864108" [ 673.425370] env[69982]: _type = "Task" [ 673.425370] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.436145] env[69982]: INFO nova.compute.manager [-] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Took 2.84 seconds to deallocate network for instance. [ 673.436578] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864108, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.690479] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 673.711870] env[69982]: INFO nova.compute.resource_tracker [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating resource usage from migration 65e21314-0928-4022-97e3-0013ad63e1cd [ 673.723833] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864107, 'name': ReconfigVM_Task, 'duration_secs': 0.444623} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.724991] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 930c8740-5ad1-4491-8dd6-1a568eaa6f62/930c8740-5ad1-4491-8dd6-1a568eaa6f62.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.725849] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ff46da5-9513-4873-88a7-6d14aa5d9f39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.736037] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 673.736037] env[69982]: value = "task-3864109" [ 673.736037] env[69982]: _type = "Task" [ 673.736037] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.754178] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864109, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.837114] env[69982]: DEBUG oslo_vmware.api [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864106, 'name': PowerOnVM_Task, 'duration_secs': 0.718267} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.837707] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 673.838210] env[69982]: INFO nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Took 11.48 seconds to spawn the instance on the hypervisor. [ 673.839208] env[69982]: DEBUG nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 673.839563] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39acc28a-9b5f-4a4f-8c8a-61db7805ee40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.872410] env[69982]: DEBUG oslo_concurrency.lockutils [req-ece3e0fa-4fe6-44f4-9f5a-e33ef6825152 req-3af7704a-b76f-435d-a0ca-e1ccc6855e32 service nova] Releasing lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.896266] env[69982]: DEBUG nova.compute.manager [req-6091e422-d6e5-4be4-b96c-277c402d125f req-fa9b969e-bb45-479e-8d22-14475e342d58 service nova] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Received event network-vif-deleted-62d379af-7ac4-4a77-a97c-43b6d0fd5a3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 673.944469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.944778] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864108, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.217696] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbd5f4c-c3fa-4ed5-851e-387126d92182 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.229203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0301e8cd-6332-4d80-829e-ed7a707ed423 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.282068] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf42ad3-539c-4558-8b4b-753f1203c2a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.288233] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864109, 'name': Rename_Task, 'duration_secs': 0.259349} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.288960] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.289223] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bc13390-5961-4427-96e4-efd9ac4dbdba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.294930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571bf9f7-e5df-45db-90cc-8e3d51f0604f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.300312] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 674.300312] env[69982]: value = "task-3864110" [ 674.300312] env[69982]: _type = "Task" [ 674.300312] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.315292] env[69982]: DEBUG nova.compute.provider_tree [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 674.324321] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.361152] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Successfully created port: d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.371541] env[69982]: INFO nova.compute.manager [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Took 20.40 seconds to build instance. [ 674.449952] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.902459} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.450324] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5c9b6dc6-887e-477a-b902-135fe06cfbbd/5c9b6dc6-887e-477a-b902-135fe06cfbbd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 674.450611] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 674.450882] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf4a1e6a-1692-40d1-bcda-d73aa7340890 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.459071] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 674.459071] env[69982]: value = "task-3864111" [ 674.459071] env[69982]: _type = "Task" [ 674.459071] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.471678] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.720045] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 674.761401] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 674.761680] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 674.761835] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 674.762199] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 674.762199] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 674.762371] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 674.762907] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 674.762907] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 674.762907] env[69982]: DEBUG nova.virt.hardware [None 
req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 674.763115] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 674.763150] env[69982]: DEBUG nova.virt.hardware [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 674.766879] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf39acdd-5ce4-4542-b646-dbdf537955b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.778051] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1001fb38-cff2-4e1b-9886-9025cef8cadb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.812458] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.852274] env[69982]: ERROR nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [req-7e3b2768-2f13-48bf-94ab-791a2c4f01af] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7e3b2768-2f13-48bf-94ab-791a2c4f01af"}]} [ 674.874675] env[69982]: DEBUG nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 674.877639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b97eec9-3ad8-4d05-9605-d40627ceaee6 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.928s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.898881] env[69982]: DEBUG nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 674.899316] env[69982]: DEBUG nova.compute.provider_tree [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 674.917496] env[69982]: DEBUG nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 674.940543] env[69982]: DEBUG nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 674.972963] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079632} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.973394] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 674.974581] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c97875-f02d-4793-82de-fa6b2af424a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.006416] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 5c9b6dc6-887e-477a-b902-135fe06cfbbd/5c9b6dc6-887e-477a-b902-135fe06cfbbd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 675.007912] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6330c76-55dd-4a1d-8a4d-898419e4ae35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.033555] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 675.033555] env[69982]: value = "task-3864112" [ 675.033555] env[69982]: _type = "Task" [ 675.033555] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.042853] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864112, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.186786] env[69982]: DEBUG nova.compute.manager [req-0b603dcb-5114-4803-bbf3-3f2aa7e7ec66 req-c03b4a91-0ff4-4043-952e-6e5b68174074 service nova] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Received event network-vif-deleted-9e46d480-3573-461c-a8d3-0a5bc4944687 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 675.226029] env[69982]: DEBUG nova.compute.manager [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.227091] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a583c1b-e675-47d1-83ea-e06fb4199c44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.322099] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.381254] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.455051] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b26f522-5762-4139-91ca-557803b027ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.466618] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff657f69-9d21-47e6-b65c-778c1ce48a2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.510819] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b78dd59-f877-4e5b-b4d7-d4aabc7ea818 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.520577] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35220599-3f60-4f44-b001-ed78c8472e8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.545200] env[69982]: DEBUG nova.compute.provider_tree [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 675.560345] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864112, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.756264] env[69982]: INFO nova.compute.manager [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] instance snapshotting [ 675.762808] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ebb4c1-63a1-447f-a2ab-ac56a5a6527a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.790582] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc32eb87-94fd-4ef6-8a9a-6d3816652d89 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.815136] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.930843] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.965685] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Successfully updated port: 8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.059678] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864112, 'name': ReconfigVM_Task, 'duration_secs': 0.633373} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.059678] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 5c9b6dc6-887e-477a-b902-135fe06cfbbd/5c9b6dc6-887e-477a-b902-135fe06cfbbd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 676.059678] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-350be469-c8e8-4f77-83fa-1bfe2d5d6b71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.072648] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 676.072648] env[69982]: value = "task-3864113" [ 676.072648] env[69982]: _type = "Task" [ 676.072648] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.088494] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864113, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.119094] env[69982]: DEBUG nova.scheduler.client.report [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 27 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 676.121106] env[69982]: DEBUG nova.compute.provider_tree [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 27 to 28 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 676.121106] env[69982]: DEBUG nova.compute.provider_tree [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 676.303553] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 676.303944] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4699f526-c201-4ddc-83a9-7e71a717a95d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.316251] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task} progress is 68%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.318036] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 676.318036] env[69982]: value = "task-3864114" [ 676.318036] env[69982]: _type = "Task" [ 676.318036] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.326664] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864114, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.470337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.470634] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.470634] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 676.586110] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864113, 'name': Rename_Task, 'duration_secs': 0.169688} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.586446] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 676.586702] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4457bf7a-1ef4-4214-8abf-dca3f8899980 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.595440] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 676.595440] env[69982]: value = "task-3864115" [ 676.595440] env[69982]: _type = "Task" [ 676.595440] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.607471] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864115, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.625121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 3.943s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.625418] env[69982]: INFO nova.compute.manager [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Migrating [ 676.625582] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.625786] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 676.627382] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.862s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.635309] env[69982]: INFO nova.compute.claims [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 
tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.639141] env[69982]: INFO nova.compute.rpcapi [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 676.639772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.684664] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Successfully updated port: d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 676.824712] env[69982]: DEBUG oslo_vmware.api [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864110, 'name': PowerOnVM_Task, 'duration_secs': 2.118895} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.827324] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.827394] env[69982]: INFO nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Took 11.93 seconds to spawn the instance on the hypervisor. [ 676.827636] env[69982]: DEBUG nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.828536] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aeaf3b-ce00-4f2b-9f56-6973c7928873 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.838861] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864114, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.063214] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 677.111568] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864115, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.156974] env[69982]: DEBUG nova.compute.manager [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 677.157206] env[69982]: DEBUG nova.compute.manager [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing instance network info cache due to event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 677.157436] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.157568] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.157727] env[69982]: DEBUG nova.network.neutron [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 677.159642] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.160058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.160058] env[69982]: DEBUG nova.network.neutron [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 
35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 677.197394] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.197394] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.197394] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 677.335576] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864114, 'name': CreateSnapshot_Task, 'duration_secs': 0.790144} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.335978] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 677.336881] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09db10b0-b398-49f1-ab75-b3c59fa32d72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.362530] env[69982]: INFO nova.compute.manager [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Took 20.73 seconds to build instance. 
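Note on the task records above: the CreateSnapshot_Task / PowerOnVM_Task lines with "progress is N%" are emitted by oslo.vmware's task-polling helper, which Nova drives through a shared VMwareAPISession. The following is a minimal sketch of that pattern only; the vCenter host, credentials and the 'vm-123' managed-object value are placeholders and are not taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern seen above.
    # Host, credentials and the 'vm-123' moref value are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',        # placeholder endpoint/credentials
        api_retry_count=10, task_poll_interval=0.5)  # connects on construction

    # Build a managed-object reference for an existing VM (illustrative value).
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Start a snapshot task; wait_for_task() polls it and logs progress,
    # which is what produces the 'CreateSnapshot_Task progress is N%' records.
    task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                              name='snap-1', description='', memory=False,
                              quiesce=False)
    task_info = session.wait_for_task(task)
    print(task_info.state)   # 'success' once vCenter reports completion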
[ 677.599844] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "ebd9e006-a591-44f7-867c-041731b9d45a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.599844] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.612102] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864115, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.766473] env[69982]: DEBUG nova.network.neutron [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Updating instance_info_cache with network_info: [{"id": "8e565aa3-376d-4b91-8dac-bc818531956d", "address": "fa:16:3e:91:43:b8", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e565aa3-37", "ovs_interfaceid": "8e565aa3-376d-4b91-8dac-bc818531956d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.823133] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 677.879876] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 677.886571] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c9ab911-fee8-4ca1-b3aa-6befaa788b1b tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.278s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.888275] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b794b8f5-13f5-41db-80b7-4d6c998318c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.917035] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 677.917035] env[69982]: value = "task-3864117" [ 677.917035] env[69982]: _type = "Task" [ 677.917035] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.936362] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.119899] env[69982]: DEBUG oslo_vmware.api [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864115, 'name': PowerOnVM_Task, 'duration_secs': 1.445017} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.123046] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.123046] env[69982]: INFO nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Took 10.67 seconds to spawn the instance on the hypervisor. 
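Note on the locking records: the 'Acquiring lock ... / Acquired lock ... / Releasing lock ...' lines (for example "compute_resources" and "refresh_cache-<uuid>") come from oslo.concurrency's lockutils, which Nova uses to serialize the resource tracker and per-instance network-cache refreshes. A minimal sketch of both forms follows; the UUID and the function bodies are placeholders, not code from Nova.

    # Minimal sketch of the oslo.concurrency locking pattern behind the
    # 'Acquiring/Acquired/Releasing lock' records; names below are placeholders.
    from oslo_concurrency import lockutils

    def refresh_network_cache(instance_uuid):
        # Context-manager form: serializes cache refreshes per instance and
        # logs the plain Acquiring/Acquired/Releasing lock messages seen above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # ... rebuild the instance's network info cache ...

    # Decorator form: logs the 'acquired by ... waited N s' / 'released ...
    # held N s' variants, like the "compute_resources" claim lines above.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # ... resource accounting guarded by the lock ...

    refresh_network_cache('00000000-0000-0000-0000-000000000000')
    claim_resources()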
[ 678.123046] env[69982]: DEBUG nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.123046] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1fb616-a7fd-47e5-9d58-53ba8800d909 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.231316] env[69982]: DEBUG nova.compute.manager [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Received event network-changed-79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 678.231803] env[69982]: DEBUG nova.compute.manager [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Refreshing instance network info cache due to event network-changed-79537ce6-7e7e-4621-b2a8-e38c01d51f7d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 678.232246] env[69982]: DEBUG oslo_concurrency.lockutils [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] Acquiring lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 678.232504] env[69982]: DEBUG oslo_concurrency.lockutils [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] Acquired lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 678.232765] env[69982]: DEBUG nova.network.neutron [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Refreshing network info cache for port 79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 678.268040] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442081c9-d338-4cf6-9ccf-8f9142801773 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.270281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.270281] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Instance network_info: |[{"id": "8e565aa3-376d-4b91-8dac-bc818531956d", "address": "fa:16:3e:91:43:b8", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e565aa3-37", "ovs_interfaceid": "8e565aa3-376d-4b91-8dac-bc818531956d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 678.270493] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:43:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e565aa3-376d-4b91-8dac-bc818531956d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.278446] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.282397] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.285940] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fab3efcb-6d5e-433a-862f-277ca0942e0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.306716] env[69982]: DEBUG nova.network.neutron [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [{"id": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "address": "fa:16:3e:52:73:aa", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73ed1df-de", "ovs_interfaceid": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.309489] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff67772-a0ca-4ccd-a2f3-fa61343dffba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.344527] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.344527] env[69982]: value = "task-3864118" [ 678.344527] env[69982]: _type = "Task" [ 678.344527] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.346689] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a46c43-745a-4559-a802-b9df882cbfad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.351015] env[69982]: DEBUG nova.network.neutron [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.367681] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a96143-80b5-4df2-9f2f-47a2901ea625 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.372753] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864118, 'name': CreateVM_Task} progress is 15%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.385253] env[69982]: DEBUG nova.compute.provider_tree [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.402788] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 678.427632] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.485745] env[69982]: DEBUG nova.network.neutron [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updated VIF entry in instance network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 678.486287] env[69982]: DEBUG nova.network.neutron [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.651483] env[69982]: INFO nova.compute.manager [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Took 20.82 seconds to build instance. 
[ 678.815179] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.815651] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Instance network_info: |[{"id": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "address": "fa:16:3e:52:73:aa", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73ed1df-de", "ovs_interfaceid": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 678.820226] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:73:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6815237d-f565-474d-a3c0-9c675478eb00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd73ed1df-de67-4db7-82b5-7f693bab3f55', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.829897] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Creating folder: Project (344ff7edee7c427bbbdb29e1a8c91a46). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.833383] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a7ff935-a271-498a-ad2b-b0baa163883f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.844732] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Created folder: Project (344ff7edee7c427bbbdb29e1a8c91a46) in parent group-v767796. 
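Note on the folder records: the 'Creating folder: Project (...). Parent ref: group-...' and 'Invoking Folder.CreateFolder' lines correspond to a single vSphere CreateFolder call made through the same API session; unlike the *_Task methods it returns the new folder reference directly, with no task to poll. A minimal sketch under the assumption that 'session' is an already-initialized oslo.vmware session; the helper name and arguments below are illustrative.

    # Minimal sketch of the Folder.CreateFolder invocation behind the
    # 'Creating folder ... Parent ref: ...' records; 'session' is assumed to be
    # an existing oslo.vmware VMwareAPISession.
    from oslo_vmware import vim_util

    def create_child_folder(session, parent_moref_value, name):
        parent_ref = vim_util.get_moref(parent_moref_value, 'Folder')
        # CreateFolder is synchronous: it returns the new Folder moref, not a task.
        return session.invoke_api(session.vim, 'CreateFolder', parent_ref,
                                  name=name)

    # e.g. create_child_folder(session, 'group-v767841', 'Instances')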
[ 678.844935] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Creating folder: Instances. Parent ref: group-v767841. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.845217] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35349efb-de88-40ed-a2eb-395940a615dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.857764] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.860959] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864118, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.864279] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Created folder: Instances in parent group-v767841. [ 678.864549] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.864816] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.864986] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bee53fe-542a-4d18-92e1-aa61e91fefca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.885168] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.885168] env[69982]: value = "task-3864122" [ 678.885168] env[69982]: _type = "Task" [ 678.885168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.891403] env[69982]: DEBUG nova.scheduler.client.report [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.899078] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864122, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.931526] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.939751] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.989168] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.989457] env[69982]: DEBUG nova.compute.manager [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Received event network-vif-plugged-8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 678.989748] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Acquiring lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.989848] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.990008] env[69982]: DEBUG oslo_concurrency.lockutils [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.990324] env[69982]: DEBUG nova.compute.manager [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] No waiting events found dispatching network-vif-plugged-8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.990497] env[69982]: WARNING nova.compute.manager [req-d8af2668-227a-4e94-acc3-69e6534fbe95 req-9468f7b8-3103-4fb3-822d-596ad25c1deb service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Received unexpected event network-vif-plugged-8e565aa3-376d-4b91-8dac-bc818531956d for instance with vm_state building and task_state spawning. 
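Note for tracing requests through this output: every record shares the same shape — bracketed uptime timestamp, env[pid], level, logger, request context, message, and a trailing {{(pid) function file:line}} suffix. The snippet below is a small, self-contained sketch for splitting records into fields (for example to time how long a task sits at a given progress value); the regular expression is written against the format visible here and is not part of any Nova or oslo tooling.

    # Sketch: split one of these nova-compute records into its fields.
    import re

    RECORD = re.compile(
        r'\[\s*(?P<ts>\d+\.\d+)\]\s+env\[\d+\]:\s+'   # bracketed uptime timestamp
        r'(?P<level>DEBUG|INFO|WARNING|ERROR)\s+'      # log level
        r'(?P<logger>\S+)\s+'                          # e.g. oslo_vmware.api
        r'(?P<rest>.*)')                               # request context + message

    def parse(line):
        m = RECORD.search(line)
        return m.groupdict() if m else None

    sample = ("[ 678.119899] env[69982]: DEBUG oslo_vmware.api [None req-x] "
              "Task: {'id': task-3864115, 'name': PowerOnVM_Task, "
              "'duration_secs': 1.445017} completed successfully.")
    print(parse(sample)['logger'])  # -> oslo_vmware.api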
[ 679.155385] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51179afe-98bd-463c-bc88-a84680cc4ab0 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.344s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.367755] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864118, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.394554] env[69982]: DEBUG nova.network.neutron [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updated VIF entry in instance network info cache for port 79537ce6-7e7e-4621-b2a8-e38c01d51f7d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 679.394554] env[69982]: DEBUG nova.network.neutron [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating instance_info_cache with network_info: [{"id": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "address": "fa:16:3e:b7:54:77", "network": {"id": "cc0fd9bc-db9f-4bd5-a4cf-197e8f8c0253", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1842391413-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "43849a9ac5c349d29082fc270a8afd0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79537ce6-7e", "ovs_interfaceid": "79537ce6-7e7e-4621-b2a8-e38c01d51f7d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 679.404195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.404958] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 679.411336] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864122, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.412124] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.574s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.414207] env[69982]: INFO nova.compute.claims [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.433258] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.862164] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864118, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.905634] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864122, 'name': CreateVM_Task, 'duration_secs': 0.522386} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.905843] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.906693] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.906745] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.907389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.908081] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d236915-98cd-4709-8bd2-5403369912ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.912802] env[69982]: DEBUG oslo_concurrency.lockutils [req-36eb2c97-4bee-4dc2-9431-f76062175fba req-2a2e574e-f89c-49b4-b0cf-95d0debf0a74 service nova] Releasing lock "refresh_cache-a8217447-bc22-4b84-925f-c3c09fb7228c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.915898] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 679.915898] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5249d6bb-51d2-f809-1b65-af03be10b560" [ 679.915898] env[69982]: _type = "Task" [ 679.915898] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.923561] env[69982]: DEBUG nova.compute.utils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 679.928266] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 679.928447] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.945161] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5249d6bb-51d2-f809-1b65-af03be10b560, 'name': SearchDatastore_Task, 'duration_secs': 0.019276} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.945960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.947773] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.947773] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.947773] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.947773] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.948581] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31e1dd88-313f-41af-8af6-c149d9b533d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.957988] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.967107] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.967328] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.968301] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e4954d6-f176-4981-bee8-93fe0d7942c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.979035] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 679.979035] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527a8d8b-a650-6c89-55ee-e20b4bf5b053" [ 679.979035] env[69982]: _type = "Task" [ 679.979035] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.988228] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527a8d8b-a650-6c89-55ee-e20b4bf5b053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.008110] env[69982]: DEBUG nova.policy [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a151a7434f534a08928e77604bf89864', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c45eb4f6d09840868b9307a14066cece', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.248620] env[69982]: DEBUG nova.compute.manager [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Received event network-changed {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 680.248927] env[69982]: DEBUG nova.compute.manager [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Refreshing instance network info cache due to event network-changed. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 680.249268] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] Acquiring lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.249472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] Acquired lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.249772] env[69982]: DEBUG nova.network.neutron [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 680.363363] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864118, 'name': CreateVM_Task, 'duration_secs': 1.713472} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.363614] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 680.365322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.365322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.365944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 680.365944] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7c73d4-3a2c-4a34-9a7e-807773f0e0de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.376879] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 680.376879] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237237c-ea9f-73fb-06c2-8afb3503c81d" [ 
680.376879] env[69982]: _type = "Task" [ 680.376879] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.382338] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-522e6a5f-d120-49af-8f0b-ea6a50c18820 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.392770] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237237c-ea9f-73fb-06c2-8afb3503c81d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.409478] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 680.422729] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Successfully created port: 98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.436165] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 680.445797] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864117, 'name': CloneVM_Task, 'duration_secs': 2.079294} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.446198] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Created linked-clone VM from snapshot [ 680.448201] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88f86e6-2dbe-4177-a260-c055f1bead38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.462883] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Uploading image 7363fbc8-a570-4772-a912-a05e3c119422 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 680.501189] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 680.501189] env[69982]: value = "vm-767839" [ 680.501189] env[69982]: _type = "VirtualMachine" [ 680.501189] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 680.501765] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9a997cbe-2ad1-4b1d-aa40-3dfff6fe91b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.508019] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527a8d8b-a650-6c89-55ee-e20b4bf5b053, 'name': SearchDatastore_Task, 'duration_secs': 0.020768} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.512085] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4440af56-522c-4be8-9085-a216a8afa989 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.519530] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lease: (returnval){ [ 680.519530] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527339b2-0b7c-3c54-a251-fa1f57f59850" [ 680.519530] env[69982]: _type = "HttpNfcLease" [ 680.519530] env[69982]: } obtained for exporting VM: (result){ [ 680.519530] env[69982]: value = "vm-767839" [ 680.519530] env[69982]: _type = "VirtualMachine" [ 680.519530] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 680.519867] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the lease: (returnval){ [ 680.519867] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527339b2-0b7c-3c54-a251-fa1f57f59850" [ 680.519867] env[69982]: _type = "HttpNfcLease" [ 680.519867] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 680.525336] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 680.525336] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ad1b3-33d5-98a2-92f9-ae33e90e9659" [ 680.525336] env[69982]: _type = "Task" [ 680.525336] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.532259] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 680.532259] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527339b2-0b7c-3c54-a251-fa1f57f59850" [ 680.532259] env[69982]: _type = "HttpNfcLease" [ 680.532259] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 680.532898] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 680.532898] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527339b2-0b7c-3c54-a251-fa1f57f59850" [ 680.532898] env[69982]: _type = "HttpNfcLease" [ 680.532898] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 680.533724] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725982e4-e0a9-42b5-82e2-5370c83b9470 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.542245] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ad1b3-33d5-98a2-92f9-ae33e90e9659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.549159] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk from lease info. 
{{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 680.549409] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 680.683808] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0d30a29e-9d3e-430b-87fb-2f8039ad5c4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.891140] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237237c-ea9f-73fb-06c2-8afb3503c81d, 'name': SearchDatastore_Task, 'duration_secs': 0.022195} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.894091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.894373] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 680.894628] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.894789] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.894988] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.895822] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dd9eb31-9b0d-4f9a-86ef-1ba8e2fc8953 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.906959] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.906959] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 680.907616] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62bb6bce-7534-49dc-bfe8-58f1bd950697 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.918199] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 680.918637] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 680.918637] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dc6ff1-571d-0556-a0c4-885130261c96" [ 680.918637] env[69982]: _type = "Task" [ 680.918637] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.924295] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9558bdb0-5776-48af-a6d6-ae20bccd7ea8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.937367] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dc6ff1-571d-0556-a0c4-885130261c96, 'name': SearchDatastore_Task, 'duration_secs': 0.014049} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.951673] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 680.951673] env[69982]: value = "task-3864125" [ 680.951673] env[69982]: _type = "Task" [ 680.951673] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.959722] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf7cbb76-aa50-4701-9e1d-694cb73afa23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.980288] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.980288] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 680.980288] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52876362-636d-031d-18ae-4708c3a5d24c" [ 680.980288] env[69982]: _type = "Task" [ 680.980288] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.982313] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c269208-5ddf-4777-872e-f9351ff0325d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.010497] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52876362-636d-031d-18ae-4708c3a5d24c, 'name': SearchDatastore_Task, 'duration_secs': 0.016428} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.013483] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64731d8c-a446-4bce-8df0-2c9e2af6823c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.018280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.018744] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b/4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.019902] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d91f28db-b630-4ebe-b0df-1eed9abac33f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.066661] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fba42136-c775-45e8-a53d-68c7b99633e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.071224] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 681.071224] env[69982]: value = "task-3864126" [ 681.071224] env[69982]: _type = "Task" [ 681.071224] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.081477] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ad1b3-33d5-98a2-92f9-ae33e90e9659, 'name': SearchDatastore_Task, 'duration_secs': 0.013744} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.083322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.083322] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 2d554902-bf28-4ee2-b9d6-4219e54246fc/2d554902-bf28-4ee2-b9d6-4219e54246fc.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.087222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c441a0-6c73-4d21-a7ae-af561614e3eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.095020] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93997598-1d82-4b9b-a8b8-2b7279cb148b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.097745] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.099636] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Received event network-changed-8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.099836] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Refreshing instance network info cache due to event network-changed-8e565aa3-376d-4b91-8dac-bc818531956d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 681.100097] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Acquiring lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.100332] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Acquired lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.100602] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Refreshing network info cache for port 8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 681.116679] env[69982]: DEBUG nova.compute.provider_tree [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.120214] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 681.120214] env[69982]: value = "task-3864127" [ 681.120214] env[69982]: _type = "Task" [ 681.120214] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.136431] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864127, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.280204] env[69982]: DEBUG nova.network.neutron [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Updating instance_info_cache with network_info: [{"id": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "address": "fa:16:3e:ab:2e:39", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5d696dd-d8", "ovs_interfaceid": "c5d696dd-d8ff-4fcd-9357-3c1510432508", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.457418] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 681.477741] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864125, 'name': PowerOffVM_Task, 'duration_secs': 0.26159} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.478825] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 681.478825] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 681.504323] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 681.506366] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.506366] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 681.506366] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.506366] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 681.506366] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 681.506675] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 681.507076] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 681.507332] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 681.507998] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 681.507998] env[69982]: DEBUG nova.virt.hardware [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 681.509520] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ce4580-3838-4eb8-8b27-9a681c30bb13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.527078] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb358d7-f523-42ec-bd2c-86616040ac40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.580037] env[69982]: DEBUG nova.compute.manager [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Received event network-changed-066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.581386] env[69982]: DEBUG nova.compute.manager [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Refreshing instance network info cache due to event network-changed-066271e7-f03a-48d7-a4a4-df17ef2b24f4. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 681.581946] env[69982]: DEBUG oslo_concurrency.lockutils [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] Acquiring lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.582196] env[69982]: DEBUG oslo_concurrency.lockutils [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] Acquired lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.582539] env[69982]: DEBUG nova.network.neutron [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Refreshing network info cache for port 066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 681.590725] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864126, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.622582] env[69982]: DEBUG nova.scheduler.client.report [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 681.645312] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864127, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.791210] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dbb28406-f2a5-4390-bf11-82e1de0e8204 tempest-ServerExternalEventsTest-301712414 tempest-ServerExternalEventsTest-301712414-project] Releasing lock "refresh_cache-5c9b6dc6-887e-477a-b902-135fe06cfbbd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.796602] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.796774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.991189] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 681.991189] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.991189] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 681.991189] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.991440] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 681.991440] env[69982]: 
DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 681.991440] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 681.991440] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 681.991440] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 681.991613] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 681.991613] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 681.996528] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2d270e7-de85-4126-9d53-a64b37d7c537 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.014956] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 682.014956] env[69982]: value = "task-3864128" [ 682.014956] env[69982]: _type = "Task" [ 682.014956] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.026919] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864128, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.031252] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Updated VIF entry in instance network info cache for port 8e565aa3-376d-4b91-8dac-bc818531956d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 682.032133] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Updating instance_info_cache with network_info: [{"id": "8e565aa3-376d-4b91-8dac-bc818531956d", "address": "fa:16:3e:91:43:b8", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e565aa3-37", "ovs_interfaceid": "8e565aa3-376d-4b91-8dac-bc818531956d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.086670] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70701} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.087036] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b/4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.087185] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.087443] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d51f65f-cc84-45f7-91a4-3f312a6950fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.096099] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 682.096099] env[69982]: value = "task-3864129" [ 682.096099] env[69982]: _type = "Task" [ 682.096099] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.106432] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.131031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.131031] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 682.132439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.791s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.133165] env[69982]: DEBUG nova.objects.instance [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lazy-loading 'resources' on Instance uuid 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 682.151770] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850935} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.157190] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 2d554902-bf28-4ee2-b9d6-4219e54246fc/2d554902-bf28-4ee2-b9d6-4219e54246fc.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.157715] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 682.159892] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-449a21fc-bf75-41b5-bb88-7117a06ba5fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.169405] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 682.169405] env[69982]: value = "task-3864130" [ 682.169405] env[69982]: _type = "Task" [ 682.169405] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.184018] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864130, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.254723] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.256033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.256033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.256033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.256033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.258261] env[69982]: INFO nova.compute.manager [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Terminating instance [ 682.299671] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 682.485739] env[69982]: DEBUG nova.network.neutron [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updated VIF entry in instance network info cache for port 066271e7-f03a-48d7-a4a4-df17ef2b24f4.
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 682.486016] env[69982]: DEBUG nova.network.neutron [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updating instance_info_cache with network_info: [{"id": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "address": "fa:16:3e:e9:6e:ee", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap066271e7-f0", "ovs_interfaceid": "066271e7-f03a-48d7-a4a4-df17ef2b24f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.527403] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864128, 'name': ReconfigVM_Task, 'duration_secs': 0.305191} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.527403] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 682.536031] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Releasing lock "refresh_cache-4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.536431] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received event network-vif-plugged-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.536735] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Acquiring lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.537044] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.537321] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.537543] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] No waiting events found dispatching network-vif-plugged-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 682.537752] env[69982]: WARNING nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received unexpected event network-vif-plugged-d73ed1df-de67-4db7-82b5-7f693bab3f55 for instance with vm_state building and task_state spawning.
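The "-events" lock traffic and the "Received unexpected event" warning above come from the per-instance external-event bookkeeping: Neutron reports network-vif-plugged through the external events API, and the compute manager pops any matching waiter while holding a "<uuid>-events" lock; when nothing is waiting yet (the instance is still building/spawning), the event is logged as unexpected and dropped. The following is a minimal, illustrative sketch of that pattern only, assuming oslo.concurrency's lockutils.lock() context manager; the names _pending_events, pop_instance_event and external_instance_event are placeholders, not Nova's actual code.

from oslo_concurrency import lockutils

# Placeholder waiter table: instance UUID -> {event name: threading.Event-style waiter}.
_pending_events = {}

def pop_instance_event(instance_uuid, event_name):
    # Serialize on a "<uuid>-events"-style lock, like the lockutils lines above,
    # so the event dispatcher and the spawning thread never race on the table.
    with lockutils.lock('%s-events' % instance_uuid):
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)

def external_instance_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Matches the WARNING above: the event arrived before anyone started
        # waiting for it (vm_state building, task_state spawning).
        print('Received unexpected event %s for instance %s'
              % (event_name, instance_uuid))
    else:
        waiter.set()  # wake the thread blocked on this event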
[ 682.537966] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.538287] env[69982]: DEBUG nova.compute.manager [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing instance network info cache due to event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 682.538596] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Acquiring lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.538785] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Acquired lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.539031] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 682.607606] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.205575} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.607950] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.608798] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f19b3c4-963b-490c-bc8a-882e3fb27d1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.633035] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b/4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.636063] env[69982]: DEBUG nova.compute.utils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 682.636063] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-359e4591-2311-42dc-9b5a-549f874bfd4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.652982] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 682.653246] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.665586] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 682.665586] env[69982]: value = "task-3864131" [ 682.665586] env[69982]: _type = "Task" [ 682.665586] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.676409] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864131, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.681847] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148177} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.684789] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.686088] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a14fff-dbf6-4186-95ea-6866f5430158 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.712509] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] 2d554902-bf28-4ee2-b9d6-4219e54246fc/2d554902-bf28-4ee2-b9d6-4219e54246fc.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.716249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74a7935b-bd18-4a80-beb6-713e1157a196 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.737809] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 682.737809] env[69982]: value = "task-3864132" [ 682.737809] env[69982]: _type = "Task" [ 682.737809] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.747719] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864132, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.753452] env[69982]: DEBUG nova.policy [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfc67b7594c741d8b9efe92f19bd25b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '662f6c17d09f4f8b873a6b50f84d8233', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 682.765561] env[69982]: DEBUG nova.compute.manager [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 682.766081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.767106] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06895c59-001e-42e7-83a2-8b764714b79d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.775203] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.775464] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fffffd3a-596d-47eb-8cf1-ff2b8bf92763 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.785503] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 682.785503] env[69982]: value = "task-3864133" [ 682.785503] env[69982]: _type = "Task" [ 682.785503] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.795446] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864133, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.824432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.988681] env[69982]: DEBUG oslo_concurrency.lockutils [req-5329daca-ef82-44e2-b590-886ac3a6dbda req-0369b730-0480-40ab-bda9-ed3ca3048666 service nova] Releasing lock "refresh_cache-930c8740-5ad1-4491-8dd6-1a568eaa6f62" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.035175] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 683.035486] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.035779] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 683.036185] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.036402] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 683.036512] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 683.036783] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 683.037085] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 683.037514] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 683.037888] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 683.037962] env[69982]: DEBUG nova.virt.hardware [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 683.043699] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfiguring VM instance instance-00000003 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 683.050098] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cce41817-87ff-48e7-a47e-89a170096b31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.075373] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 683.075373] env[69982]: value = "task-3864134" [ 683.075373] env[69982]: _type = "Task" [ 683.075373] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.081142] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea734b4b-6075-4671-8cbc-f2f08c8758e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.087286] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864134, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.092792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d6c94e-05cf-45b1-a725-a77bcbbfd73c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.130096] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Successfully updated port: 98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 683.134205] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b604bbe-5055-4993-9cff-83bf8019d36e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.150210] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699f5cf1-b8fa-4e37-8c2a-c7e761bb00c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.156495] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.177139] env[69982]: DEBUG nova.compute.provider_tree [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.189062] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.249499] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.296233] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864133, 'name': PowerOffVM_Task, 'duration_secs': 0.226865} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.296483] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.296695] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.297047] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b20466b-a689-421e-9d46-8af685c4983e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.372760] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.373131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.373420] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Deleting the datastore file [datastore1] 5c9b6dc6-887e-477a-b902-135fe06cfbbd {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.374461] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d5c9fce-29d4-4369-b681-d4e38f87f8ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.381389] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for the task: (returnval){ [ 683.381389] env[69982]: value = "task-3864136" [ 683.381389] env[69982]: _type = "Task" [ 683.381389] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.390883] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864136, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.587376] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864134, 'name': ReconfigVM_Task, 'duration_secs': 0.253616} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.587824] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfigured VM instance instance-00000003 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 683.589155] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc7e7ab-cce4-4a85-8b3a-bde2ce7deb99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.628644] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.629044] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8ede988-923e-4134-832c-f8b41b4adb80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.646389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.646389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquired lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.646389] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 683.655814] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 683.655814] env[69982]: value = "task-3864137" [ 683.655814] env[69982]: _type = "Task" [ 683.655814] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.676265] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864137, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.683094] env[69982]: DEBUG nova.scheduler.client.report [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.690395] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864131, 'name': ReconfigVM_Task, 'duration_secs': 0.643506} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.690949] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b/4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.691629] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a9113f5-e6a2-4042-8140-d248b742d4a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.700449] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 683.700449] env[69982]: value = "task-3864138" [ 683.700449] env[69982]: _type = "Task" [ 683.700449] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.710706] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864138, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.735646] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updated VIF entry in instance network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 683.735646] env[69982]: DEBUG nova.network.neutron [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [{"id": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "address": "fa:16:3e:52:73:aa", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73ed1df-de", "ovs_interfaceid": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.749628] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864132, 'name': ReconfigVM_Task, 'duration_secs': 0.59953} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.750313] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Reconfigured VM instance instance-0000000f to attach disk [datastore2] 2d554902-bf28-4ee2-b9d6-4219e54246fc/2d554902-bf28-4ee2-b9d6-4219e54246fc.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.750987] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac22e27f-a1ae-418a-b774-b7feb578ee9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.760526] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 683.760526] env[69982]: value = "task-3864139" [ 683.760526] env[69982]: _type = "Task" [ 683.760526] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.774566] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864139, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.803587] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Successfully created port: ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 683.895631] env[69982]: DEBUG oslo_vmware.api [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Task: {'id': task-3864136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239097} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.895631] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.895822] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 683.896065] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 683.897570] env[69982]: INFO nova.compute.manager [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 683.897570] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.897570] env[69982]: DEBUG nova.compute.manager [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 683.897570] env[69982]: DEBUG nova.network.neutron [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 684.167418] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864137, 'name': ReconfigVM_Task, 'duration_secs': 0.415874} completed successfully.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.167709] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99/35fdf25e-c8c0-4123-a95a-2a4c1a504f99.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.167971] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 684.172711] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.193141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.060s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.197940] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.470s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.203255] env[69982]: INFO nova.compute.claims [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.217626] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864138, 'name': Rename_Task, 'duration_secs': 0.171065} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.220539] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.220870] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.220954] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.223012] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.223012] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.223012] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.223012] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.223012] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.223342] env[69982]: DEBUG 
nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.223342] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.223342] env[69982]: DEBUG nova.virt.hardware [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.223342] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.224104] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e25e8f7-7d5f-4cf8-b56b-33f57034362c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.230411] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 684.231766] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bee5330b-de09-4586-905f-1d6f8532962d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.240639] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd80046c-90dd-4e57-9f17-647f54094142 req-b335077c-af9c-4153-9d1b-6ed3df3f8a59 service nova] Releasing lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.245238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5876b7e8-6dc1-48a4-bfe2-631261d6b802 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.252192] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 684.252192] env[69982]: value = "task-3864140" [ 684.252192] env[69982]: _type = "Task" [ 684.252192] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.252192] env[69982]: INFO nova.scheduler.client.report [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Deleted allocations for instance 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2 [ 684.284470] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864140, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.287704] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864139, 'name': Rename_Task, 'duration_secs': 0.168018} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.287901] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.288833] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e964a937-eb93-43a4-a113-3a0b940ec897 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.297779] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 684.297779] env[69982]: value = "task-3864141" [ 684.297779] env[69982]: _type = "Task" [ 684.297779] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.307480] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.517432] env[69982]: DEBUG nova.compute.manager [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Received event network-vif-plugged-98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.517432] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Acquiring lock "049c7405-3daf-4064-8770-efbbf15c832e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.517432] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Lock "049c7405-3daf-4064-8770-efbbf15c832e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.517432] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Lock "049c7405-3daf-4064-8770-efbbf15c832e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.517902] env[69982]: DEBUG nova.compute.manager [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] No waiting events found dispatching network-vif-plugged-98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.517902] env[69982]: WARNING nova.compute.manager [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Received unexpected event network-vif-plugged-98f315fb-5979-46b6-aa4e-c58039f5c013 for instance with vm_state building and task_state spawning. [ 684.517902] env[69982]: DEBUG nova.compute.manager [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Received event network-changed-98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.518097] env[69982]: DEBUG nova.compute.manager [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Refreshing instance network info cache due to event network-changed-98f315fb-5979-46b6-aa4e-c58039f5c013. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 684.518291] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Acquiring lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.524123] env[69982]: DEBUG nova.network.neutron [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Updating instance_info_cache with network_info: [{"id": "98f315fb-5979-46b6-aa4e-c58039f5c013", "address": "fa:16:3e:5d:f4:ae", "network": {"id": "bd8ca5c6-0338-43f1-9805-256a8c0e1ac7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-948794380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c45eb4f6d09840868b9307a14066cece", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f315fb-59", "ovs_interfaceid": "98f315fb-5979-46b6-aa4e-c58039f5c013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.681257] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e55a2a-ea19-4748-93c8-d3109f205c81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.704918] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba41839-cc55-4a6c-8761-508b63b4f944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.728979] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 684.765172] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864140, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.774481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e41101fc-6ada-4197-8ee8-a24dbf90752f tempest-ServerDiagnosticsV248Test-1814849516 tempest-ServerDiagnosticsV248Test-1814849516-project-member] Lock "3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.912s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.811134] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864141, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.030586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Releasing lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.030789] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Instance network_info: |[{"id": "98f315fb-5979-46b6-aa4e-c58039f5c013", "address": "fa:16:3e:5d:f4:ae", "network": {"id": "bd8ca5c6-0338-43f1-9805-256a8c0e1ac7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-948794380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c45eb4f6d09840868b9307a14066cece", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f315fb-59", "ovs_interfaceid": "98f315fb-5979-46b6-aa4e-c58039f5c013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 685.031808] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Acquired lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.032800] env[69982]: DEBUG nova.network.neutron [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Refreshing network info cache for port 98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2071}} [ 685.036083] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:f4:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d413776-9a8c-4afd-856f-10dbb062ca95', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98f315fb-5979-46b6-aa4e-c58039f5c013', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 685.044309] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Creating folder: Project (c45eb4f6d09840868b9307a14066cece). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.044309] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-98c427cc-a82a-44d9-9285-0cc09f2fe832 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.057585] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Created folder: Project (c45eb4f6d09840868b9307a14066cece) in parent group-v767796. [ 685.058365] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Creating folder: Instances. Parent ref: group-v767844. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 685.058541] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ad7b830-b244-47d6-97e1-dbf1bfc50cd8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.074233] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Created folder: Instances in parent group-v767844. [ 685.075268] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.075268] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 685.075268] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0fca2364-2c15-4556-9ca3-c89114cd8e02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.100774] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 685.100774] env[69982]: value = "task-3864144" [ 685.100774] env[69982]: _type = "Task" [ 685.100774] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.110199] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864144, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.269883] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864140, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.310395] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864141, 'name': PowerOnVM_Task} progress is 91%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.340484] env[69982]: DEBUG nova.network.neutron [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Port 5d41e03b-3fd8-4389-a457-2000cf628f86 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 685.466454] env[69982]: DEBUG nova.network.neutron [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.617839] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864144, 'name': CreateVM_Task, 'duration_secs': 0.450558} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.619100] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 685.621745] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.622153] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.622504] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 685.626048] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5901218c-3b90-46b1-8ddb-4867be395237 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.632723] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 685.632723] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5224c075-7f13-1dee-8070-e55bec571863" [ 685.632723] env[69982]: _type = "Task" [ 685.632723] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.651150] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5224c075-7f13-1dee-8070-e55bec571863, 'name': SearchDatastore_Task, 'duration_secs': 0.013593} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.651508] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.651750] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 685.652469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.652469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.652469] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.652651] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-878ebac7-dfa5-4616-a372-aaee7073037f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.663026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a36fa7-b5f3-4071-a165-488b036c7ca1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.665041] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.665244] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 685.666425] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a657612e-3062-4240-b93f-d6a451418ae4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.672626] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-535a446a-ef7a-49fd-aa51-dd35a66eb8af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.678607] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 685.678607] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52308b67-a37c-661e-cdff-7e29afcb2074" [ 685.678607] env[69982]: _type = "Task" [ 685.678607] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.722053] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e362e184-17a6-4279-8b17-e1a1cbfff68f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.730040] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52308b67-a37c-661e-cdff-7e29afcb2074, 'name': SearchDatastore_Task, 'duration_secs': 0.016204} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.730296] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b517a70f-6a30-4898-a8a1-c41dd3740c1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.737542] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de3bdde-b37f-4d70-9d34-f4d447a79f71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.744053] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 685.744053] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52daa8f1-3c05-fbf4-6e1f-251e7f89d45e" [ 685.744053] env[69982]: _type = "Task" [ 685.744053] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.760268] env[69982]: DEBUG nova.compute.provider_tree [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.769585] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52daa8f1-3c05-fbf4-6e1f-251e7f89d45e, 'name': SearchDatastore_Task, 'duration_secs': 0.022829} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.770316] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.770591] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 049c7405-3daf-4064-8770-efbbf15c832e/049c7405-3daf-4064-8770-efbbf15c832e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.770856] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6f18023-738a-4a1d-a3bb-d539247cd823 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.777449] env[69982]: DEBUG oslo_vmware.api [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864140, 'name': PowerOnVM_Task, 'duration_secs': 1.50901} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.777760] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.778119] env[69982]: INFO nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Took 13.68 seconds to spawn the instance on the hypervisor. 
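The entries above show oslo.vmware's task-polling pattern: a vCenter task handle (e.g. task-3864140, PowerOnVM_Task) is created, _poll_task logs a progress percentage on each poll, and wait_for_task finally reports completion together with a measured duration_secs. Below is a minimal sketch of that kind of poll loop, written only to illustrate the behaviour visible in the log; the TaskInfo shape and the get_task_info callable are assumptions for illustration, not the real oslo.vmware or vSphere API.

import time
from dataclasses import dataclass

# Hypothetical task-status snapshot; the fields mirror what the log prints
# (task name, progress percentage, completion), not the real vSphere TaskInfo.
@dataclass
class TaskInfo:
    name: str                 # e.g. "PowerOnVM_Task"
    state: str                # "running", "success" or "error"
    progress: int             # 0-100
    error: str | None = None

def wait_for_task(task_id, get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, emitting lines like the ones above.

    get_task_info is an assumed callable returning a TaskInfo for task_id;
    in the real driver this role is played by the oslo.vmware API session.
    """
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {task_id!r}, 'name': {info.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} ({info.name}) failed: {info.error}")
        print(f"Task: {{'id': {task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
        time.sleep(poll_interval)

The real loop lives in oslo_vmware/api.py (wait_for_task / _poll_task, as the log citations show); this sketch only reproduces what the log records: periodic progress lines followed by a single completion line carrying the duration.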
[ 685.778119] env[69982]: DEBUG nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.779222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577ae39d-03c8-413b-a53d-e149a3b37246 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.783597] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 685.783597] env[69982]: value = "task-3864145" [ 685.783597] env[69982]: _type = "Task" [ 685.783597] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.801223] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.814403] env[69982]: DEBUG oslo_vmware.api [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864141, 'name': PowerOnVM_Task, 'duration_secs': 1.021314} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.815077] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.815077] env[69982]: INFO nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Took 11.10 seconds to spawn the instance on the hypervisor. [ 685.815274] env[69982]: DEBUG nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.816170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302d4414-5046-4fed-b594-e7f03e6b11a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.955922] env[69982]: DEBUG nova.network.neutron [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Updated VIF entry in instance network info cache for port 98f315fb-5979-46b6-aa4e-c58039f5c013. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 685.956321] env[69982]: DEBUG nova.network.neutron [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Updating instance_info_cache with network_info: [{"id": "98f315fb-5979-46b6-aa4e-c58039f5c013", "address": "fa:16:3e:5d:f4:ae", "network": {"id": "bd8ca5c6-0338-43f1-9805-256a8c0e1ac7", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-948794380-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c45eb4f6d09840868b9307a14066cece", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d413776-9a8c-4afd-856f-10dbb062ca95", "external-id": "nsx-vlan-transportzone-913", "segmentation_id": 913, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98f315fb-59", "ovs_interfaceid": "98f315fb-5979-46b6-aa4e-c58039f5c013", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.971598] env[69982]: INFO nova.compute.manager [-] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Took 2.07 seconds to deallocate network for instance. [ 686.272568] env[69982]: DEBUG nova.scheduler.client.report [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.307425] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864145, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.313440] env[69982]: INFO nova.compute.manager [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Took 25.57 seconds to build instance. [ 686.339439] env[69982]: INFO nova.compute.manager [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Took 25.39 seconds to build instance. 
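Taken together, the "Instance network_info: |[...]|" and "Instance VIF info [...]" entries show the translation the VMware driver performs before building the VM: the neutron port's MAC address, port UUID and NSX logical-switch id are repackaged into an OpaqueNetwork reference plus a vmxnet3 VIF model. The helper below is a simplified sketch of that mapping, inferred only from the two log entries (output field names copied from the log); it is an illustration, not the driver's actual vif-translation code.

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    """Sketch of the neutron network_info -> VMware VIF info translation
    visible in the log. The field mapping is inferred from the entries
    above, not taken from nova.virt.vmwareapi itself.
    """
    vif_infos = []
    for vif in network_info:
        details = vif.get("details", {})
        vif_infos.append({
            "network_name": vif["network"]["bridge"],   # e.g. 'br-int'
            "mac_address": vif["address"],              # e.g. 'fa:16:3e:5d:f4:ae'
            "network_ref": {
                # NSX-backed ports are referenced as an opaque logical switch
                # rather than a named port group (assumption based on the
                # 'nsx-logical-switch-id' value in the log entry).
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                      # the neutron port UUID
            "vif_model": vif_model,
        })
    return vif_infos

Feeding the network_info list logged above through this helper reproduces the single-element VIF info list that build_virtual_machine reports for instance 049c7405-3daf-4064-8770-efbbf15c832e.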
[ 686.380854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.380854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.380854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.394644] env[69982]: DEBUG nova.compute.manager [req-6e1b65ca-0963-46de-b012-43df44c1d036 req-dd35aebd-9bc9-405f-ae4f-166e970ed485 service nova] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Received event network-vif-deleted-c5d696dd-d8ff-4fcd-9357-3c1510432508 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 686.466616] env[69982]: DEBUG oslo_concurrency.lockutils [req-7eef4a45-809f-4247-b369-155785cdc5d6 req-beaf9765-02db-4fc1-a522-30c337de5642 service nova] Releasing lock "refresh_cache-049c7405-3daf-4064-8770-efbbf15c832e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.480497] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.634533] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "67f59d53-c61b-48ad-b55d-710595e9dae3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.634870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.775754] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 
tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Successfully updated port: ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 686.780244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.582s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.783738] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 686.786737] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.788666] env[69982]: INFO nova.compute.claims [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.810907] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864145, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73155} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.810907] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 049c7405-3daf-4064-8770-efbbf15c832e/049c7405-3daf-4064-8770-efbbf15c832e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.810907] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.810907] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-37a81b4d-23d7-4cab-83c9-34e0cc5876df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.818188] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3e650367-fe77-492e-9e71-63e7ca53b7ed tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.570s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.821014] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 686.821014] env[69982]: value = "task-3864146" [ 686.821014] env[69982]: _type = "Task" [ 686.821014] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.837412] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864146, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.842993] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c0c27830-bcc3-4362-8b0f-e5c669ad56fc tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.422s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.288680] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.288826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquired lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.288983] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 687.296510] env[69982]: DEBUG nova.compute.utils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 687.299061] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 687.300383] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 687.322521] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 687.336809] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864146, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09692} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.336970] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 687.337933] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5c2b31-fe54-423f-8844-d2d07b782dca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.366512] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 049c7405-3daf-4064-8770-efbbf15c832e/049c7405-3daf-4064-8770-efbbf15c832e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.371923] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e7b5073-6176-4529-a77b-cb6d138aa288 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.400240] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 687.400240] env[69982]: value = "task-3864147" [ 687.400240] env[69982]: _type = "Task" [ 687.400240] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.413009] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864147, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.463687] env[69982]: DEBUG nova.policy [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '12b5b09ff85441259e16bae9087594e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47a586a7d38c41f9bf08d79bb1ca17df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 687.528086] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.528267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.528622] env[69982]: DEBUG nova.network.neutron [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 687.800365] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 687.896395] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.926675] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864147, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.936613] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 688.373856] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16a72e6-f582-436a-a71b-1bd7bcf745e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.385343] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe596746-fadb-4b17-b46c-cb8b0e8435c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.436186] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bda9819-317c-49a7-a319-0caea169ae19 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.445995] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864147, 'name': ReconfigVM_Task, 'duration_secs': 0.605691} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.448522] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 049c7405-3daf-4064-8770-efbbf15c832e/049c7405-3daf-4064-8770-efbbf15c832e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.449355] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-89ea768c-e078-49a4-a113-9df72d8a42cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.452753] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbc5c2e-e6e9-40de-adc5-40db72a85fca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.470365] env[69982]: DEBUG nova.compute.provider_tree [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.476360] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 688.476360] env[69982]: value = "task-3864148" [ 688.476360] env[69982]: _type = "Task" [ 688.476360] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.485740] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864148, 'name': Rename_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.585449] env[69982]: DEBUG nova.network.neutron [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Updating instance_info_cache with network_info: [{"id": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "address": "fa:16:3e:6a:6d:ea", "network": {"id": "ecb7006a-2cdd-45d7-9169-8e1b826fdb39", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1381035678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "662f6c17d09f4f8b873a6b50f84d8233", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef327e31-93", "ovs_interfaceid": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.649433] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Successfully created port: f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.774478] env[69982]: DEBUG nova.network.neutron [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 688.820350] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 688.848889] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 688.849203] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 688.849685] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 688.849685] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 688.849685] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 688.849847] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 688.850023] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 688.850438] env[69982]: DEBUG nova.virt.hardware 
[None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 688.850776] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 688.850948] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 688.851279] env[69982]: DEBUG nova.virt.hardware [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 688.852362] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7695fa1d-0187-4829-8c18-642b94e975f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.864638] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bf560e-0b72-42dc-b029-d1f94f7c287b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.970453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.970761] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.975654] env[69982]: DEBUG nova.scheduler.client.report [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
689.000550] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864148, 'name': Rename_Task, 'duration_secs': 0.195083} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.000822] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 689.001088] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95738271-d39b-423d-9705-3ba24e82eafe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.009903] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 689.009903] env[69982]: value = "task-3864149" [ 689.009903] env[69982]: _type = "Task" [ 689.009903] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.020203] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864149, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.089677] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Releasing lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.092022] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Instance network_info: |[{"id": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "address": "fa:16:3e:6a:6d:ea", "network": {"id": "ecb7006a-2cdd-45d7-9169-8e1b826fdb39", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1381035678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "662f6c17d09f4f8b873a6b50f84d8233", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef327e31-93", "ovs_interfaceid": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.092239] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6a:6d:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef327e31-93cb-44af-9ebb-5a46cd13075f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.100932] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Creating folder: Project (662f6c17d09f4f8b873a6b50f84d8233). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.101282] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac019631-895f-40e2-9955-a457fc9260de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.114714] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Created folder: Project (662f6c17d09f4f8b873a6b50f84d8233) in parent group-v767796. [ 689.114934] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Creating folder: Instances. Parent ref: group-v767847. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.115211] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11b2492-d5e5-4a1f-9b46-e0421bfbdeee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.129024] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Created folder: Instances in parent group-v767847. [ 689.129024] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.129024] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.129024] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfa99d5a-db94-43b3-a983-81028d71018e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.157308] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.157308] env[69982]: value = "task-3864152" [ 689.157308] env[69982]: _type = "Task" [ 689.157308] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.170108] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864152, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.280063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.476415] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.485433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.486386] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.489540] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.654s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.489815] env[69982]: DEBUG nova.objects.instance [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'resources' on Instance uuid 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.524842] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.669869] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864152, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.814332] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59527d12-fbb4-4388-b2db-64767628991e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.841777] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42353f6-e338-4fe4-9ef7-59cace611e7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.848153] env[69982]: DEBUG nova.compute.manager [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Received event network-vif-plugged-ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.848364] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Acquiring lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.848536] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.848701] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.848867] env[69982]: DEBUG nova.compute.manager [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] No waiting events found dispatching network-vif-plugged-ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 689.849038] env[69982]: WARNING nova.compute.manager [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Received unexpected event network-vif-plugged-ef327e31-93cb-44af-9ebb-5a46cd13075f for instance with vm_state building and task_state spawning. 
[ 689.849238] env[69982]: DEBUG nova.compute.manager [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Received event network-changed-ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.849396] env[69982]: DEBUG nova.compute.manager [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Refreshing instance network info cache due to event network-changed-ef327e31-93cb-44af-9ebb-5a46cd13075f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 689.849580] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Acquiring lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.849754] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Acquired lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.849923] env[69982]: DEBUG nova.network.neutron [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Refreshing network info cache for port ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 689.858015] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 689.994255] env[69982]: DEBUG nova.compute.utils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.998874] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.999819] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 690.006884] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.024466] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.117913] env[69982]: DEBUG nova.policy [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9865336291ac4440bd18495935352d6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66125bb452294cea84c97f820c3e94ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 690.176154] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864152, 'name': CreateVM_Task, 'duration_secs': 1.014726} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.176455] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 690.177349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.177434] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.177746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 690.178060] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d824b674-8b5c-497e-a1af-517a70e02997 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.184973] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 690.184973] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52adfeae-6cd8-ee38-5326-b0829444c1c2" [ 690.184973] env[69982]: _type = "Task" [ 690.184973] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.203037] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52adfeae-6cd8-ee38-5326-b0829444c1c2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.370214] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 690.370548] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebd22f8d-3aec-4c38-ac92-0cafbda7a551 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.385643] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 690.385643] env[69982]: value = "task-3864153" [ 690.385643] env[69982]: _type = "Task" [ 690.385643] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.397865] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.500222] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.537325] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864149, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.541206] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432ad6cc-a86f-4aad-83d6-7206e4183e7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.552187] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40944b0c-c6d3-4ea0-92b8-91f4c72de222 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.613172] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a18beb-adac-4e56-8498-beec27959c2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.624717] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af3135af-2e56-4677-a51c-275445fe4e07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.642037] env[69982]: DEBUG nova.compute.provider_tree [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.703991] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52adfeae-6cd8-ee38-5326-b0829444c1c2, 'name': SearchDatastore_Task, 'duration_secs': 0.020187} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.704530] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.704783] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 690.705185] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.705433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.705643] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.705985] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-809f0a8b-e6ab-4a6e-9335-fd2527324d56 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.719175] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.719513] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.720370] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3772bce-ddda-4b28-ab81-9cbc0803a769 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.728785] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 690.728785] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a73195-c5de-6782-6f02-2e2ad420057f" [ 690.728785] env[69982]: _type = "Task" [ 690.728785] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.742165] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a73195-c5de-6782-6f02-2e2ad420057f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.897637] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864153, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.029528] env[69982]: DEBUG oslo_vmware.api [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864149, 'name': PowerOnVM_Task, 'duration_secs': 1.538472} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.029815] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.030035] env[69982]: INFO nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Took 9.57 seconds to spawn the instance on the hypervisor. 
[ 691.030253] env[69982]: DEBUG nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.031041] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2a9387-37c3-4535-ae26-cdcc0cd68dcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.054219] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Successfully created port: 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 691.146032] env[69982]: DEBUG nova.scheduler.client.report [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 691.171324] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 691.172379] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80741b06-5aee-4835-9caa-cb97967eed25 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.180857] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 691.181042] env[69982]: ERROR oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk due to incomplete transfer. 
[ 691.181275] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-da4a741c-153e-4518-83e9-f5bbe1d8720a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.189458] env[69982]: DEBUG oslo_vmware.rw_handles [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5267689b-c95a-4c27-596b-7e5c3b97a8a7/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 691.190220] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Uploaded image 7363fbc8-a570-4772-a912-a05e3c119422 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 691.191697] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 691.191963] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-40e553ad-9dc0-4d71-b377-2cc688873792 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.201190] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 691.201190] env[69982]: value = "task-3864154" [ 691.201190] env[69982]: _type = "Task" [ 691.201190] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.214661] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864154, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.240552] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a73195-c5de-6782-6f02-2e2ad420057f, 'name': SearchDatastore_Task, 'duration_secs': 0.016863} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.241299] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b845690-75fb-47cf-87e3-3b7e04cde1a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.251193] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 691.251193] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528a09eb-3ced-f4dd-2bca-12d52c2126d5" [ 691.251193] env[69982]: _type = "Task" [ 691.251193] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.259444] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528a09eb-3ced-f4dd-2bca-12d52c2126d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.401922] env[69982]: DEBUG oslo_vmware.api [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864153, 'name': PowerOnVM_Task, 'duration_secs': 0.804101} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.402258] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 691.402458] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e4528ac-4ed8-4cd1-96a7-950c0825db2d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance '35fdf25e-c8c0-4123-a95a-2a4c1a504f99' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 691.412654] env[69982]: DEBUG nova.network.neutron [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Updated VIF entry in instance network info cache for port ef327e31-93cb-44af-9ebb-5a46cd13075f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 691.413055] env[69982]: DEBUG nova.network.neutron [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Updating instance_info_cache with network_info: [{"id": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "address": "fa:16:3e:6a:6d:ea", "network": {"id": "ecb7006a-2cdd-45d7-9169-8e1b826fdb39", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1381035678-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "662f6c17d09f4f8b873a6b50f84d8233", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef327e31-93", "ovs_interfaceid": "ef327e31-93cb-44af-9ebb-5a46cd13075f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.514542] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.552687] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 691.552804] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.552968] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 691.553470] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.553470] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 691.553470] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 691.553883] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 691.554046] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 691.554233] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 691.554412] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 691.554586] env[69982]: DEBUG nova.virt.hardware [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 691.555644] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16300e80-adda-4f8a-8e22-446a8302f3a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.561517] env[69982]: INFO nova.compute.manager [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Took 26.83 seconds to build instance. [ 691.569529] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4079dce-5a4c-4de8-9b20-6fbe0b8b94d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.655344] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.657208] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.710s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.659034] env[69982]: INFO nova.compute.claims [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.695815] env[69982]: INFO nova.scheduler.client.report [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8 [ 691.711633] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb 
tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864154, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.760514] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528a09eb-3ced-f4dd-2bca-12d52c2126d5, 'name': SearchDatastore_Task, 'duration_secs': 0.011417} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.761417] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.761881] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6efb0df5-0435-424a-b4cc-1eaefdcf388d/6efb0df5-0435-424a-b4cc-1eaefdcf388d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 691.762655] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a556f098-3139-4bc0-a36d-8a03b49075b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.772043] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 691.772043] env[69982]: value = "task-3864155" [ 691.772043] env[69982]: _type = "Task" [ 691.772043] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.787022] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864155, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.845104] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Successfully updated port: f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.918120] env[69982]: DEBUG oslo_concurrency.lockutils [req-4c63a008-ccf0-4756-9875-1a82f376ddbd req-392e646d-a045-4b78-8849-269f9dbf94fb service nova] Releasing lock "refresh_cache-6efb0df5-0435-424a-b4cc-1eaefdcf388d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.065415] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aa4c0152-1c1c-4342-aca8-10ed88dc8e03 tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.124s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.210115] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5fc233b1-9440-4887-8c62-490f7ae15b91 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.672s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.221742] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864154, 'name': Destroy_Task, 'duration_secs': 0.996994} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.222081] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Destroyed the VM [ 692.222413] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 692.222684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2470a48b-e7d2-4c83-84ea-9fcdfc8169e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.234730] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 692.234730] env[69982]: value = "task-3864156" [ 692.234730] env[69982]: _type = "Task" [ 692.234730] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.248787] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864156, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.285602] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864155, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.349959] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.353696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquired lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.353696] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 692.691750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "5743a020-0c09-45ec-aca4-5ce367cc201a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.692218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.753116] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864156, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.793096] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.822938} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.793358] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6efb0df5-0435-424a-b4cc-1eaefdcf388d/6efb0df5-0435-424a-b4cc-1eaefdcf388d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 692.793569] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 692.793854] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-551c0783-337d-4ac8-b169-0385f757767a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.802783] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 692.802783] env[69982]: value = "task-3864157" [ 692.802783] env[69982]: _type = "Task" [ 692.802783] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.818427] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864157, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.919015] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 693.198228] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 693.251378] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864156, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.255140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f17f89-be40-4763-93b2-864ade7be7a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.268547] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b25f15-74a9-4e3c-b03a-dddebf68c533 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.322525] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c41ba8-8518-4c7c-b4d6-74b8f92c1c01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.336896] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864157, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096892} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.337825] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 693.339160] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c09fca-4774-4d7d-b54e-842499d9183c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.344226] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657cfca6-5ca2-4997-bffe-31926643f916 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.362892] env[69982]: DEBUG nova.compute.provider_tree [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.389769] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 6efb0df5-0435-424a-b4cc-1eaefdcf388d/6efb0df5-0435-424a-b4cc-1eaefdcf388d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 693.390947] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1e942a5-17a8-4fac-a69c-963e445f214a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.413894] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 
tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 693.413894] env[69982]: value = "task-3864158" [ 693.413894] env[69982]: _type = "Task" [ 693.413894] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.428117] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864158, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.599473] env[69982]: DEBUG nova.network.neutron [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Updating instance_info_cache with network_info: [{"id": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "address": "fa:16:3e:92:99:6f", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf12b494d-82", "ovs_interfaceid": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.720068] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.720326] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing instance network info cache due to event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 693.720563] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Acquiring lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.720733] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Acquired lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.720884] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 693.754437] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.755208] env[69982]: DEBUG oslo_vmware.api [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864156, 'name': RemoveSnapshot_Task, 'duration_secs': 1.340013} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.755748] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 693.755983] env[69982]: INFO nova.compute.manager [None req-cd60c111-1ac5-47e7-a6c2-b15c6b494ffb tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 18.00 seconds to snapshot the instance on the hypervisor. 
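[editor's note] The entries above repeat one pattern: a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, RemoveSnapshot_Task) is submitted, then polled through wait_for_task/_poll_task (the oslo_vmware/api.py frames in the log) until it reports "progress is N%" and finally "completed successfully" with a duration_secs. The short standalone Python sketch below reproduces only that polling loop for readers following the log. It is not the oslo.vmware implementation referenced in the log paths; the FakeTaskInfo class, its field names, the 0.5s poll interval and the print-based logging are simplified assumptions for illustration.

import time


class FakeTaskInfo:
    # Stand-in for a vCenter TaskInfo object (assumption, not the real API):
    # each refresh() advances progress by 25% and flips to "success" at 100%.
    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self.progress = 0
        self.state = "running"

    def refresh(self):
        # A real client would re-read TaskInfo from vCenter here.
        self.progress = min(self.progress + 25, 100)
        if self.progress == 100:
            self.state = "success"


def wait_for_task(task, poll_interval=0.5):
    # Poll until the task reaches a terminal state, logging progress the way
    # the "_poll_task ... progress is N%" lines above do.
    started = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            duration = time.monotonic() - started
            print(f"Task {task.task_id} ({task.name}) completed successfully "
                  f"in {duration:.3f}s")
            return
        if task.state == "error":
            raise RuntimeError(f"Task {task.task_id} ({task.name}) failed")
        print(f"Task {task.task_id} ({task.name}) progress is {task.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTaskInfo("task-0000001", "CopyVirtualDisk_Task"))

[end editor's note]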
[ 693.892236] env[69982]: DEBUG nova.scheduler.client.report [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.929013] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864158, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.000238] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Successfully updated port: 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 694.102720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Releasing lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.103016] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Instance network_info: |[{"id": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "address": "fa:16:3e:92:99:6f", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf12b494d-82", "ovs_interfaceid": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.103550] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 
tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:99:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f12b494d-8265-4d7f-82a2-f2ec5ed91285', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.116267] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Creating folder: Project (47a586a7d38c41f9bf08d79bb1ca17df). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.116692] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87ddd248-8d8a-4c75-9531-59360bf1b7a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.130733] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Created folder: Project (47a586a7d38c41f9bf08d79bb1ca17df) in parent group-v767796. [ 694.131018] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Creating folder: Instances. Parent ref: group-v767850. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.131542] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-205604e0-2189-4b83-918c-2094bb1d7ef8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.145538] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Created folder: Instances in parent group-v767850. [ 694.146214] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.146666] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.147239] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-305e5d5e-37cc-4cae-a015-c3cc652a6553 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.172790] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.172790] env[69982]: value = "task-3864161" [ 694.172790] env[69982]: _type = "Task" [ 694.172790] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.182678] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864161, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.399981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.401053] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.404695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.460s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.404921] env[69982]: DEBUG nova.objects.instance [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lazy-loading 'resources' on Instance uuid e82ae1bd-c31b-44ca-9608-9348b8eac8dc {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 694.435895] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864158, 'name': ReconfigVM_Task, 'duration_secs': 0.69023} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.437165] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 6efb0df5-0435-424a-b4cc-1eaefdcf388d/6efb0df5-0435-424a-b4cc-1eaefdcf388d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.437165] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-041d7bc5-f752-41b7-9a3a-cf92eb910c7a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.455052] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 694.455052] env[69982]: value = "task-3864162" [ 694.455052] env[69982]: _type = "Task" [ 694.455052] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.467176] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864162, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.504749] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.504749] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.504749] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 694.582419] env[69982]: DEBUG nova.compute.manager [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 694.582867] env[69982]: DEBUG nova.compute.manager [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing instance network info cache due to event network-changed-d73ed1df-de67-4db7-82b5-7f693bab3f55. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 694.582867] env[69982]: DEBUG oslo_concurrency.lockutils [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] Acquiring lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.640348] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updated VIF entry in instance network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 694.640348] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [{"id": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "address": "fa:16:3e:52:73:aa", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73ed1df-de", "ovs_interfaceid": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.686410] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864161, 'name': CreateVM_Task, 'duration_secs': 0.507792} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.686586] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 694.688032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.688032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.688032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 694.688342] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4dcd651-8946-46ad-b18e-0310f6177f5d {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.699036] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 694.699036] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269cffc-df22-c08c-b4da-2c091bee58dd" [ 694.699036] env[69982]: _type = "Task" [ 694.699036] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.706369] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269cffc-df22-c08c-b4da-2c091bee58dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.909086] env[69982]: DEBUG nova.compute.utils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 694.914275] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 694.914524] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 694.971841] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864162, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.988979] env[69982]: DEBUG nova.policy [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b448d520c5de477f8f42581cb5098917', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f77a1e221ed742f9b4ffae2507db7d83', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 695.095833] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 695.143964] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Releasing lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.144632] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Received event network-vif-plugged-f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.144846] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Acquiring lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.145074] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.145297] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.145424] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] No waiting events found dispatching network-vif-plugged-f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 695.145564] env[69982]: WARNING nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Received unexpected event network-vif-plugged-f12b494d-8265-4d7f-82a2-f2ec5ed91285 for instance with vm_state building and task_state spawning. [ 695.145728] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Received event network-changed-f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.145884] env[69982]: DEBUG nova.compute.manager [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Refreshing instance network info cache due to event network-changed-f12b494d-8265-4d7f-82a2-f2ec5ed91285. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 695.146054] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Acquiring lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.146189] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Acquired lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.146346] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Refreshing network info cache for port f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 695.148501] env[69982]: DEBUG oslo_concurrency.lockutils [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] Acquired lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.148699] env[69982]: DEBUG nova.network.neutron [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Refreshing network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 695.215457] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269cffc-df22-c08c-b4da-2c091bee58dd, 'name': SearchDatastore_Task, 'duration_secs': 0.021873} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.218972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.219972] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.219972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.219972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.220264] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.223562] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76c28f40-8a46-4772-99c9-9f6f52b810c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.247952] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.248208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.248974] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d5554ba-f3bb-49dc-b0be-09f97e8a0bcb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.257551] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 695.257551] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b628-d9bc-db3f-b152-0f3b977fc87b" [ 695.257551] env[69982]: _type = "Task" [ 695.257551] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.269541] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b628-d9bc-db3f-b152-0f3b977fc87b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.323510] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.323792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.417730] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.442810] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Successfully created port: 99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.449050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5d1ca2-ead0-43f9-a388-f0f5318a7de1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.465147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4a40eb-c0af-49db-a3ff-71023c7888cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.475827] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864162, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.507253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cd5f04-964c-40f5-a05f-ca54c6302b21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.516687] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29ec48e-8ced-4053-914d-d3408ca858d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.538339] env[69982]: DEBUG nova.compute.provider_tree [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.726626] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.726973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.770496] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b628-d9bc-db3f-b152-0f3b977fc87b, 'name': 
SearchDatastore_Task, 'duration_secs': 0.01834} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.773952] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65c53105-4734-45b3-a469-e983715a286c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.780651] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 695.780651] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c263bb-0060-2e2f-394a-21ac2ad39af4" [ 695.780651] env[69982]: _type = "Task" [ 695.780651] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.789784] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c263bb-0060-2e2f-394a-21ac2ad39af4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.828827] env[69982]: DEBUG nova.network.neutron [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.982506] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864162, 'name': Rename_Task, 'duration_secs': 1.249921} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.987238] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.987238] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d505e353-1a28-43a7-b0c4-0516055cb53d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.994605] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 695.994605] env[69982]: value = "task-3864163" [ 695.994605] env[69982]: _type = "Task" [ 695.994605] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.006773] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864163, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.038169] env[69982]: DEBUG nova.network.neutron [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updated VIF entry in instance network info cache for port d73ed1df-de67-4db7-82b5-7f693bab3f55. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 696.038169] env[69982]: DEBUG nova.network.neutron [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [{"id": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "address": "fa:16:3e:52:73:aa", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd73ed1df-de", "ovs_interfaceid": "d73ed1df-de67-4db7-82b5-7f693bab3f55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.040320] env[69982]: DEBUG nova.scheduler.client.report [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 696.146423] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Updated VIF entry in instance network info cache for port f12b494d-8265-4d7f-82a2-f2ec5ed91285. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 696.150016] env[69982]: DEBUG nova.network.neutron [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Updating instance_info_cache with network_info: [{"id": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "address": "fa:16:3e:92:99:6f", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf12b494d-82", "ovs_interfaceid": "f12b494d-8265-4d7f-82a2-f2ec5ed91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.298725] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c263bb-0060-2e2f-394a-21ac2ad39af4, 'name': SearchDatastore_Task, 'duration_secs': 0.026664} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.299325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.299431] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 78ba4fa9-4083-4204-a5b4-88cdcec6ca13/78ba4fa9-4083-4204-a5b4-88cdcec6ca13.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.299943] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d70aae9-d199-42e6-8ee5-de22c2691149 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.313220] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 696.313220] env[69982]: value = "task-3864164" [ 696.313220] env[69982]: _type = "Task" [ 696.313220] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.323724] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.331511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.331949] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Instance network_info: |[{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 696.332444] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:a9:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44b0fa67-6d20-4612-a177-c8b0ed39faaf', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 696.343466] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Creating folder: Project (66125bb452294cea84c97f820c3e94ce). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 696.343466] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92d45e51-c2bc-44f4-8eac-3cd4dcb44b41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.358117] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Created folder: Project (66125bb452294cea84c97f820c3e94ce) in parent group-v767796. [ 696.358543] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Creating folder: Instances. Parent ref: group-v767853. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 696.358989] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be69d758-d6d9-4f42-909e-b8334bf73bec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.373999] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Created folder: Instances in parent group-v767853. [ 696.374326] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.374770] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 696.375062] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3ee8d84-04ab-4945-8b45-5711e9334f6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.399225] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 696.399225] env[69982]: value = "task-3864167" [ 696.399225] env[69982]: _type = "Task" [ 696.399225] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.409478] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864167, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.430876] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.491543] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 696.492118] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.492534] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 696.492876] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.493208] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 696.496023] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 696.496023] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 696.496023] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 696.496023] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 696.496023] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 696.496458] env[69982]: DEBUG nova.virt.hardware [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 696.496458] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc463eea-fa96-4df0-9831-8315c29f573d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.514170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e15b3d-5cd5-4228-bd48-89fdb0fb9fb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.518699] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864163, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.545505] env[69982]: DEBUG oslo_concurrency.lockutils [req-447e7dc2-2f5a-4a3a-8864-c159d1aeb59e req-68d366c5-81b9-497f-8f7c-37d23058c43b service nova] Releasing lock "refresh_cache-2d554902-bf28-4ee2-b9d6-4219e54246fc" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.545949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.549189] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.619s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.550986] env[69982]: INFO nova.compute.claims [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.587843] env[69982]: INFO nova.scheduler.client.report [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Deleted allocations for instance e82ae1bd-c31b-44ca-9608-9348b8eac8dc [ 696.651105] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d56dbde-4138-4ad2-96e0-3a5deb43fe04 req-cc7a7c49-ccc6-4569-90cd-584aa14beb11 service nova] Releasing lock "refresh_cache-78ba4fa9-4083-4204-a5b4-88cdcec6ca13" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.828300] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864164, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.845312] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.845593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.912189] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864167, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.011512] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864163, 'name': PowerOnVM_Task} progress is 82%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.103681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfd3e660-290f-4408-a060-86797e5228df tempest-TenantUsagesTestJSON-1388019932 tempest-TenantUsagesTestJSON-1388019932-project-member] Lock "e82ae1bd-c31b-44ca-9608-9348b8eac8dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.195s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.107153] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "25957956-0d50-4b4f-8e5c-f55a1e182235" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.107597] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.326337] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579614} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.326626] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 78ba4fa9-4083-4204-a5b4-88cdcec6ca13/78ba4fa9-4083-4204-a5b4-88cdcec6ca13.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.326848] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.327211] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cad03bd8-105d-4051-b1ed-5dede8f70453 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.335629] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 697.335629] env[69982]: value = "task-3864168" [ 697.335629] env[69982]: _type = "Task" [ 697.335629] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.346785] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864168, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.419006] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864167, 'name': CreateVM_Task, 'duration_secs': 0.535449} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.419510] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 697.420305] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.420487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.420822] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 697.421113] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a429e38-790f-4a8e-85c6-6924d240584a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.427790] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 697.427790] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ed9cb3-ca6a-9e2d-1e6a-e90d7e77e54c" [ 697.427790] env[69982]: _type = "Task" [ 697.427790] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.436714] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ed9cb3-ca6a-9e2d-1e6a-e90d7e77e54c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.510750] env[69982]: DEBUG oslo_vmware.api [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864163, 'name': PowerOnVM_Task, 'duration_secs': 1.129008} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.511010] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.511451] env[69982]: INFO nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Took 13.34 seconds to spawn the instance on the hypervisor. [ 697.511451] env[69982]: DEBUG nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.512443] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6268f823-44f6-423c-abb4-25ac82f63167 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.676089] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Successfully updated port: 99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.849088] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864168, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.25336} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.850027] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 697.850893] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4eea157-5c38-4e4f-a539-e2b1e43a2b30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.876023] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 78ba4fa9-4083-4204-a5b4-88cdcec6ca13/78ba4fa9-4083-4204-a5b4-88cdcec6ca13.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 697.882892] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c336472-a575-463c-9d1c-2c538bfd2d6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.913831] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 697.913831] env[69982]: value = "task-3864169" [ 697.913831] env[69982]: _type = "Task" [ 697.913831] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.936615] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864169, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.946786] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ed9cb3-ca6a-9e2d-1e6a-e90d7e77e54c, 'name': SearchDatastore_Task, 'duration_secs': 0.049173} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.951013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.952746] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 697.952746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.952746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.952746] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 697.952746] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3c40a4a-8c63-4002-b4ec-e8f141c17ac2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.970720] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 697.970720] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 697.973413] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63af5ece-94c8-45c7-8e3e-19cda2b0af0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.975901] env[69982]: DEBUG nova.compute.manager [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-vif-plugged-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.976363] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Acquiring lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.976476] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.976641] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.976806] env[69982]: DEBUG nova.compute.manager [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] No waiting events found dispatching network-vif-plugged-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.976969] env[69982]: WARNING nova.compute.manager [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received unexpected event network-vif-plugged-44b0fa67-6d20-4612-a177-c8b0ed39faaf for instance with vm_state building and task_state spawning. [ 697.977232] env[69982]: DEBUG nova.compute.manager [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.977340] env[69982]: DEBUG nova.compute.manager [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing instance network info cache due to event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 697.977544] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.977675] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.977830] env[69982]: DEBUG nova.network.neutron [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 697.991679] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 697.991679] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d45f19-02b5-e3d5-ec99-1af7b0f1238c" [ 697.991679] env[69982]: _type = "Task" [ 697.991679] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.003037] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d45f19-02b5-e3d5-ec99-1af7b0f1238c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.031137] env[69982]: INFO nova.compute.manager [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Took 30.22 seconds to build instance. 
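(Editorial aside, not part of the captured log.) The oslo_vmware.api entries above repeat a single pattern: a request is issued, a task handle such as "task-3864163" is returned, and wait_for_task/_poll_task re-poll the task until it reports 100% and "completed successfully". The following is a minimal, self-contained Python sketch of that polling loop only. FakeTask, its poll() method, and wait_for_task here are illustrative stand-ins invented for this sketch; they are not the oslo.vmware API or the Nova code referenced in the log.

# Illustrative sketch of the poll-until-complete pattern seen in the
# oslo_vmware.api log lines above. All names here are hypothetical.
import time


class FakeTask:
    """Stands in for a vCenter task handle such as 'task-3864163'."""

    def __init__(self, task_id, steps=3):
        self.id = task_id
        self._steps = steps
        self._polls = 0

    def poll(self):
        # Each poll advances progress; a real task would report whatever
        # the server-side operation has completed so far.
        self._polls += 1
        progress = min(100, int(100 * self._polls / self._steps))
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, interval=0.5):
    """Poll a task until it succeeds, logging progress like _poll_task does."""
    while True:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.id!r}}} progress is {progress}%.")
        if state == "success":
            print(f"Task {task.id!r} completed successfully.")
            return
        time.sleep(interval)


wait_for_task(FakeTask("task-3864163"))

Running the sketch prints a progress line per poll followed by a completion line, which is the same shape as the "progress is N%" / "completed successfully" pairs recorded above for Rename_Task, CopyVirtualDisk_Task and PowerOnVM_Task.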
[ 698.147114] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3e2d95-afe8-4fc5-b13d-bcbcc80c262c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.156029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca54041d-43b0-4bce-9a9a-e9a6c96663a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.160176] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.160438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.160649] env[69982]: DEBUG nova.compute.manager [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Going to confirm migration 1 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 698.196150] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.196150] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.196150] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 698.200304] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f1a5fb-e970-4477-b1bc-b21370e30e77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.209810] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1ea26a-94d9-49f4-b2e8-7ffa541c8c16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.231672] env[69982]: DEBUG nova.compute.provider_tree [None 
req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.426650] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864169, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.504926] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d45f19-02b5-e3d5-ec99-1af7b0f1238c, 'name': SearchDatastore_Task, 'duration_secs': 0.037062} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.506291] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8ae8a0b-8a69-4b71-8076-78dc1eea0e69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.514946] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 698.514946] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213107d-8d8d-5f5a-e635-0d1f041b6878" [ 698.514946] env[69982]: _type = "Task" [ 698.514946] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.528421] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213107d-8d8d-5f5a-e635-0d1f041b6878, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.533712] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10f6bbd5-71c8-4c1e-a472-5cc75436be89 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.432s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.570942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.571220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.735939] env[69982]: DEBUG nova.scheduler.client.report [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.760521] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "049c7405-3daf-4064-8770-efbbf15c832e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.761123] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.761453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "049c7405-3daf-4064-8770-efbbf15c832e-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.761453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.761664] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.765274] env[69982]: INFO nova.compute.manager [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Terminating instance [ 698.773466] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.773692] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 698.773921] env[69982]: DEBUG nova.network.neutron [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 698.774181] env[69982]: DEBUG nova.objects.instance [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lazy-loading 'info_cache' on Instance uuid 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.872131] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 698.928652] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864169, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.029202] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213107d-8d8d-5f5a-e635-0d1f041b6878, 'name': SearchDatastore_Task, 'duration_secs': 0.012743} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.029976] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.031047] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 589419ea-c609-45bb-bde5-3b22d9ff111e/589419ea-c609-45bb-bde5-3b22d9ff111e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 699.031337] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d12e378-bd7c-4c05-8097-cc4a4b8d76d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.038800] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 699.038800] env[69982]: value = "task-3864170" [ 699.038800] env[69982]: _type = "Task" [ 699.038800] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.039438] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 699.055944] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864170, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.244657] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.245413] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 699.248687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.309s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.250785] env[69982]: INFO nova.compute.claims [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.280904] env[69982]: DEBUG nova.compute.manager [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 699.284145] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.284145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243f5bce-0dd2-472c-a46f-31f868808f30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.297693] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 699.298282] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15b5b62e-2cde-4922-9e95-d51405edc9bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.311929] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 699.311929] env[69982]: value = "task-3864171" [ 699.311929] env[69982]: _type = "Task" [ 699.311929] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.327814] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.427132] env[69982]: DEBUG nova.network.neutron [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updated VIF entry in instance network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 699.429433] env[69982]: DEBUG nova.network.neutron [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.434367] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864169, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.555932] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864170, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.567464] env[69982]: DEBUG nova.network.neutron [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Updating instance_info_cache with network_info: [{"id": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "address": "fa:16:3e:22:ed:a1", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99cd9e66-31", "ovs_interfaceid": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.591679] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.759587] env[69982]: DEBUG nova.compute.utils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 699.763997] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 699.765597] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 699.824761] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864171, 'name': PowerOffVM_Task, 'duration_secs': 0.250424} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.824866] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 699.825032] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 699.826539] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38c52917-154a-4c3b-8e2c-9cd3328b13c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.893109] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.893457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.893667] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Deleting the datastore file [datastore2] 049c7405-3daf-4064-8770-efbbf15c832e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.893967] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ad74afc-d12c-4bc4-b505-206b42dbdbec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.902692] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for the task: (returnval){ [ 699.902692] env[69982]: value = "task-3864173" [ 699.902692] env[69982]: _type = "Task" [ 699.902692] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.914641] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864173, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.916690] env[69982]: DEBUG nova.policy [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fa4344cb58643f3a9988e5fc5f7a17d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '100f7b204a534b4abc5ac945fd4c5393', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 699.929474] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864169, 'name': ReconfigVM_Task, 'duration_secs': 1.714082} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.929804] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 78ba4fa9-4083-4204-a5b4-88cdcec6ca13/78ba4fa9-4083-4204-a5b4-88cdcec6ca13.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.930782] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d159013-0537-4f81-9f6e-dc38fcba3bd7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.936591] env[69982]: DEBUG oslo_concurrency.lockutils [req-96327632-8f74-46c4-86ef-3fd758de2af2 req-69514789-f164-476c-8bb1-ab1c1c7413e9 service nova] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.939298] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 699.939298] env[69982]: value = "task-3864174" [ 699.939298] env[69982]: _type = "Task" [ 699.939298] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.950111] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864174, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.054609] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864170, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.629287} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.054609] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 589419ea-c609-45bb-bde5-3b22d9ff111e/589419ea-c609-45bb-bde5-3b22d9ff111e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 700.054609] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 700.054609] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2460c174-34ef-4c07-bdb7-ef6d0b45fbc1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.064802] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 700.064802] env[69982]: value = "task-3864175" [ 700.064802] env[69982]: _type = "Task" [ 700.064802] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.077085] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.077574] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Instance network_info: |[{"id": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "address": "fa:16:3e:22:ed:a1", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99cd9e66-31", "ovs_interfaceid": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 700.079404] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:ed:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99cd9e66-3189-4fac-9e08-6cf8f661693e', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 700.086536] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.087314] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 700.087576] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8ad11ed-9977-4a9d-a5ca-25f421d8ce38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.108330] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864175, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.114792] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 700.114792] env[69982]: value = "task-3864176" [ 700.114792] env[69982]: _type = "Task" [ 700.114792] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.125830] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864176, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.264607] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 700.416778] env[69982]: DEBUG oslo_vmware.api [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Task: {'id': task-3864173, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143499} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.417318] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.417635] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 700.417720] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.417881] env[69982]: INFO nova.compute.manager [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 700.418102] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.418395] env[69982]: DEBUG nova.compute.manager [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 700.418395] env[69982]: DEBUG nova.network.neutron [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 700.462069] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864174, 'name': Rename_Task, 'duration_secs': 0.221093} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.464823] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.466227] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6f0bd12-852d-44a8-ac37-0f4d01f5a055 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.475286] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 700.475286] env[69982]: value = "task-3864177" [ 700.475286] env[69982]: _type = "Task" [ 700.475286] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.487058] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.582026] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864175, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096774} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.584687] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 700.585737] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c758a918-c7dd-4730-ae39-9d8717496f84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.609736] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 589419ea-c609-45bb-bde5-3b22d9ff111e/589419ea-c609-45bb-bde5-3b22d9ff111e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 700.614192] env[69982]: DEBUG nova.network.neutron [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [{"id": "5d41e03b-3fd8-4389-a457-2000cf628f86", "address": "fa:16:3e:d4:22:c4", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.77", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d41e03b-3f", "ovs_interfaceid": "5d41e03b-3fd8-4389-a457-2000cf628f86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.615569] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d67bb838-e446-4cac-bebc-fca9a1236f60 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.645842] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864176, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.648021] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 700.648021] env[69982]: value = "task-3864178" [ 700.648021] env[69982]: _type = "Task" [ 700.648021] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.667327] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864178, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.868698] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c325e5d2-d3d3-4082-9524-6ff1e83b1358 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.877162] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5abb191-3014-47a4-9397-6d56a88ccbcb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.913361] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c75a26a-50b3-4a93-af18-ac4188306d01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.923242] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99379d7e-292d-4209-ba4b-5e7a438151b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.940164] env[69982]: DEBUG nova.compute.provider_tree [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.987968] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.133849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-35fdf25e-c8c0-4123-a95a-2a4c1a504f99" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.134143] env[69982]: DEBUG nova.objects.instance [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lazy-loading 'migration_context' on Instance uuid 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.147030] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864176, 'name': CreateVM_Task, 'duration_secs': 0.606923} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.147268] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 701.148254] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.148432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.149533] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 701.149533] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241cdaf6-dd4e-4b44-86b5-8affcb5ee9f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.160323] env[69982]: DEBUG nova.compute.manager [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Received event network-vif-plugged-99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.160587] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Acquiring lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.160820] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.161014] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.161342] env[69982]: DEBUG nova.compute.manager [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] No waiting events found dispatching network-vif-plugged-99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.161441] env[69982]: WARNING nova.compute.manager [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Received unexpected event network-vif-plugged-99cd9e66-3189-4fac-9e08-6cf8f661693e for instance with vm_state building and task_state spawning. [ 701.161591] env[69982]: DEBUG nova.compute.manager [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Received event network-changed-99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.161761] env[69982]: DEBUG nova.compute.manager [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Refreshing instance network info cache due to event network-changed-99cd9e66-3189-4fac-9e08-6cf8f661693e. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 701.162124] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Acquiring lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.162293] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Acquired lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.162458] env[69982]: DEBUG nova.network.neutron [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Refreshing network info cache for port 99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 701.169654] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.170051] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 701.170051] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52231278-173d-fc81-488d-88d758f9738f" [ 701.170051] env[69982]: _type = "Task" [ 701.170051] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.182521] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52231278-173d-fc81-488d-88d758f9738f, 'name': SearchDatastore_Task, 'duration_secs': 0.013942} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.182827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.183218] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 701.183475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.183620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.184581] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.184581] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0779bd22-05a1-4612-8af4-1c5a94c10fae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.194092] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.194345] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 701.195202] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c43bd18-2797-439c-8e9d-53968e4903d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.202613] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 701.202613] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c3fc0f-ea38-466e-bd1b-2167e323f1c4" [ 701.202613] env[69982]: _type = "Task" [ 701.202613] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.218021] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c3fc0f-ea38-466e-bd1b-2167e323f1c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.279725] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 701.286186] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Successfully created port: 3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.306052] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 701.306348] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 701.306476] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 701.307267] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 701.310275] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 701.310505] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 701.310905] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 701.310905] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 701.311098] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 701.311304] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 701.311485] env[69982]: DEBUG nova.virt.hardware [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 701.312846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004f235d-3485-4995-a1b2-b5915d727c81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.322318] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b0c7c8-3854-47e0-b6bd-305300fea91e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.447922] env[69982]: DEBUG nova.scheduler.client.report [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.454342] env[69982]: DEBUG nova.network.neutron [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.467526] env[69982]: DEBUG nova.compute.manager [req-c67a658f-d43b-464a-b24a-708861b60496 req-57176492-fc30-4386-919b-47d997ff760f service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Received event network-vif-deleted-98f315fb-5979-46b6-aa4e-c58039f5c013 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.467742] env[69982]: INFO nova.compute.manager [req-c67a658f-d43b-464a-b24a-708861b60496 req-57176492-fc30-4386-919b-47d997ff760f service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Neutron deleted interface 98f315fb-5979-46b6-aa4e-c58039f5c013; detaching it from the instance and deleting it from the info cache [ 701.467908] env[69982]: DEBUG nova.network.neutron [req-c67a658f-d43b-464a-b24a-708861b60496 req-57176492-fc30-4386-919b-47d997ff760f service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.491096] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.640243] env[69982]: DEBUG nova.objects.base [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Object Instance<35fdf25e-c8c0-4123-a95a-2a4c1a504f99> lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 701.641256] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc1b15a-119b-4fcb-8250-d5948beedf1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.670071] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe9b963-16da-45d1-ae22-79db5ba5cbe9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.684502] env[69982]: DEBUG oslo_vmware.api [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 701.684502] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5222fe42-5e50-e59a-5fe9-e40c86cca13c" [ 701.684502] env[69982]: _type = "Task" [ 701.684502] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.684502] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864178, 'name': ReconfigVM_Task, 'duration_secs': 0.658701} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.684502] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 589419ea-c609-45bb-bde5-3b22d9ff111e/589419ea-c609-45bb-bde5-3b22d9ff111e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 701.688078] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6369aa44-f046-46cf-8538-845918dbd082 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.697619] env[69982]: DEBUG oslo_vmware.api [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5222fe42-5e50-e59a-5fe9-e40c86cca13c, 'name': SearchDatastore_Task, 'duration_secs': 0.007674} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.699129] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.699522] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 701.699522] env[69982]: value = "task-3864179" [ 701.699522] env[69982]: _type = "Task" [ 701.699522] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.712813] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c3fc0f-ea38-466e-bd1b-2167e323f1c4, 'name': SearchDatastore_Task, 'duration_secs': 0.013206} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.717868] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864179, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.718247] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c9d1d00-c6e8-4dc7-8340-fddfd7898b68 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.725904] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 701.725904] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522a5c5f-ed9f-6dbf-11fc-9d0d62bd9508" [ 701.725904] env[69982]: _type = "Task" [ 701.725904] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.745168] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522a5c5f-ed9f-6dbf-11fc-9d0d62bd9508, 'name': SearchDatastore_Task, 'duration_secs': 0.010909} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.745416] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.745679] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9d1b0a5f-e096-4641-a077-f0949135efbb/9d1b0a5f-e096-4641-a077-f0949135efbb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 701.745965] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6f05919-ad4c-461c-be70-db18fbede05d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.753154] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 701.753154] env[69982]: value = "task-3864180" [ 701.753154] env[69982]: _type = "Task" [ 701.753154] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.762175] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.955390] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.955463] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 701.960825] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.135s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.961540] env[69982]: INFO nova.compute.claims [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 701.968700] env[69982]: INFO nova.compute.manager [-] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Took 1.55 seconds to deallocate network for instance. [ 701.971131] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a80ae9b0-e57a-47c0-8e42-4a1a868bbfad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.000326] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02b6526-ff91-4367-96b0-a1f5d091bb9b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.018033] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.049713] env[69982]: DEBUG nova.compute.manager [req-c67a658f-d43b-464a-b24a-708861b60496 req-57176492-fc30-4386-919b-47d997ff760f service nova] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Detach interface failed, port_id=98f315fb-5979-46b6-aa4e-c58039f5c013, reason: Instance 049c7405-3daf-4064-8770-efbbf15c832e could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 702.169680] env[69982]: DEBUG nova.network.neutron [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Updated VIF entry in instance network info cache for port 99cd9e66-3189-4fac-9e08-6cf8f661693e. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 702.170058] env[69982]: DEBUG nova.network.neutron [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Updating instance_info_cache with network_info: [{"id": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "address": "fa:16:3e:22:ed:a1", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99cd9e66-31", "ovs_interfaceid": "99cd9e66-3189-4fac-9e08-6cf8f661693e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.214122] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864179, 'name': Rename_Task, 'duration_secs': 0.173559} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.214433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 702.214690] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5518b2c4-7fed-4006-90f6-6114d3b15980 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.224304] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 702.224304] env[69982]: value = "task-3864181" [ 702.224304] env[69982]: _type = "Task" [ 702.224304] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.234605] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864181, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.268754] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864180, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.472102] env[69982]: DEBUG nova.compute.utils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 702.480379] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 702.480379] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.486216] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.490296] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.531518] env[69982]: DEBUG nova.policy [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3b50c0a41794b3e9032bd2c8f62f0b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20ab85756df94226800ca6a415d05d7b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 702.675516] env[69982]: DEBUG oslo_concurrency.lockutils [req-e16b7aab-7fea-4dfa-88d1-53cc041dcf46 req-41df0a88-cdec-4b25-a677-4c939fad7e1b service nova] Releasing lock "refresh_cache-9d1b0a5f-e096-4641-a077-f0949135efbb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.736297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.736742] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.748393] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864181, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.767263] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574904} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.767560] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9d1b0a5f-e096-4641-a077-f0949135efbb/9d1b0a5f-e096-4641-a077-f0949135efbb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 702.767777] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 702.768149] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d30418a-474c-4296-9b35-fdb6180bf2e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.782927] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 702.782927] env[69982]: value = "task-3864182" [ 702.782927] env[69982]: _type = "Task" [ 702.782927] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.795566] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864182, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.932628] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Successfully created port: 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.977741] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 702.996920] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.194032] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Successfully updated port: 3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.254770] env[69982]: DEBUG oslo_vmware.api [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864181, 'name': PowerOnVM_Task, 'duration_secs': 0.535655} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.254868] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 703.255019] env[69982]: INFO nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Took 11.74 seconds to spawn the instance on the hypervisor. [ 703.255208] env[69982]: DEBUG nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 703.256057] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706a7f34-e5dc-46f2-8b55-c2bc6feb1b8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.299505] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105977} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.300234] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 703.300839] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c8246c-2ce3-4786-a72d-2ad606848df5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.328318] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 9d1b0a5f-e096-4641-a077-f0949135efbb/9d1b0a5f-e096-4641-a077-f0949135efbb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 703.333031] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efabd57c-7539-40ed-b697-09827cf64372 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.358790] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 703.358790] env[69982]: value = "task-3864183" [ 703.358790] env[69982]: _type = "Task" [ 703.358790] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.372359] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.511894] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.605359] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0defb5-f95c-4cb1-be97-66f0b2c6971d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.618181] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f95815d-2887-4f60-b548-23c82c0d2fa3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.659455] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c314df-1f14-4d55-9e6f-b96c0d0de31f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.669040] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cbe409-c504-49e1-947c-cd1d81858fda {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.689446] env[69982]: DEBUG nova.compute.provider_tree [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.698871] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.699044] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquired lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.699282] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 703.786649] env[69982]: INFO nova.compute.manager [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Took 31.03 seconds to build instance. 
[ 703.858881] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.859303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.871175] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.002608] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.010907] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.039437] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.039834] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.039876] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.040526] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.040719] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.040902] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.041148] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.041363] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.041497] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.041710] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.041898] env[69982]: DEBUG nova.virt.hardware [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.042879] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4718f00-9b0a-40e8-89ee-5bdb4eadb3a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.053566] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd30f2b1-8661-4629-b861-5e4420626755 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.073628] env[69982]: DEBUG nova.compute.manager [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Received event network-vif-plugged-3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 704.073628] env[69982]: DEBUG oslo_concurrency.lockutils [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] Acquiring lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.073628] env[69982]: DEBUG oslo_concurrency.lockutils [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.073628] env[69982]: DEBUG oslo_concurrency.lockutils [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.073628] env[69982]: DEBUG nova.compute.manager [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] No waiting events found dispatching network-vif-plugged-3abe4cd5-150d-42ef-b772-c706ee0b6a33 
{{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 704.074189] env[69982]: WARNING nova.compute.manager [req-d751b56a-4905-4dc0-bd20-35c60cece7d2 req-ccc0ed44-5ded-4203-aa95-32763d1d3986 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Received unexpected event network-vif-plugged-3abe4cd5-150d-42ef-b772-c706ee0b6a33 for instance with vm_state building and task_state spawning. [ 704.192688] env[69982]: DEBUG nova.scheduler.client.report [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.252601] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 704.288846] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3069813c-db4d-4656-9d6c-6c98e9a1ccc6 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.147s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.359725] env[69982]: DEBUG nova.compute.manager [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 704.361515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ddf79b2-900d-493e-8a96-91cf2d0e3195 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.383150] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864183, 'name': ReconfigVM_Task, 'duration_secs': 0.682358} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.387658] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 9d1b0a5f-e096-4641-a077-f0949135efbb/9d1b0a5f-e096-4641-a077-f0949135efbb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 704.388669] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f593e0ba-7542-4603-aec4-afdb5c6d675f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.399850] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 704.399850] env[69982]: value = "task-3864184" [ 704.399850] env[69982]: _type = "Task" [ 704.399850] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.412128] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864184, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.469630] env[69982]: DEBUG nova.network.neutron [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Updating instance_info_cache with network_info: [{"id": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "address": "fa:16:3e:29:d9:c9", "network": {"id": "950bf596-c0f5-4c51-8c15-6976fd97c2f8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-706333667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100f7b204a534b4abc5ac945fd4c5393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3abe4cd5-15", "ovs_interfaceid": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.505072] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': 
PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.646768] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Successfully updated port: 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 704.698546] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.699135] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 704.703157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.224s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.703413] env[69982]: DEBUG nova.objects.instance [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lazy-loading 'resources' on Instance uuid 5c9b6dc6-887e-477a-b902-135fe06cfbbd {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 704.756143] env[69982]: DEBUG nova.compute.manager [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-vif-plugged-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 704.756397] env[69982]: DEBUG oslo_concurrency.lockutils [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] Acquiring lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.756627] env[69982]: DEBUG oslo_concurrency.lockutils [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] Lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.756872] env[69982]: DEBUG oslo_concurrency.lockutils [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] Lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.756960] env[69982]: DEBUG nova.compute.manager [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] No waiting events found dispatching network-vif-plugged-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 704.757777] env[69982]: WARNING nova.compute.manager [req-05a26ab0-a887-4ed0-95a3-784cc78075ee req-d78ae87b-6e15-461f-918c-e99a18842f9d service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received unexpected event network-vif-plugged-6db4163b-49c9-41e1-bfbb-098d83fd379d for instance with vm_state building and task_state spawning. [ 704.791361] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 704.888557] env[69982]: INFO nova.compute.manager [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] instance snapshotting [ 704.891730] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe011050-973e-49a6-aef5-580c4d79d381 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.917139] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91ecc2c-4c1d-4902-b74d-97fb79ca9ae8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.927267] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864184, 'name': Rename_Task, 'duration_secs': 0.265955} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.929020] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 704.932798] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df29fe0a-c6ad-49f8-a3df-e882541c62dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.945061] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 704.945061] env[69982]: value = "task-3864185" [ 704.945061] env[69982]: _type = "Task" [ 704.945061] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.960718] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.975201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Releasing lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.975563] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Instance network_info: |[{"id": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "address": "fa:16:3e:29:d9:c9", "network": {"id": "950bf596-c0f5-4c51-8c15-6976fd97c2f8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-706333667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100f7b204a534b4abc5ac945fd4c5393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3abe4cd5-15", "ovs_interfaceid": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 704.976030] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:d9:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92233552-2c0c-416e-9bf3-bfcca8eda2dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3abe4cd5-150d-42ef-b772-c706ee0b6a33', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 704.985866] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Creating folder: Project (100f7b204a534b4abc5ac945fd4c5393). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 704.986579] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dc028f60-52d3-4c8e-b3b7-0c4f11162bc7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.999310] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Created folder: Project (100f7b204a534b4abc5ac945fd4c5393) in parent group-v767796. [ 704.999310] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Creating folder: Instances. Parent ref: group-v767857. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.000070] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8609bc91-6b85-484a-b185-b71024d9246e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.005449] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task} progress is 74%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.016968] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Created folder: Instances in parent group-v767857. [ 705.017314] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.017538] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.017805] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8a54cfb8-0583-4272-8da1-1e820ce80ca8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.041081] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.041081] env[69982]: value = "task-3864188" [ 705.041081] env[69982]: _type = "Task" [ 705.041081] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.054195] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864188, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.150481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.150752] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.151022] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 705.210353] env[69982]: DEBUG nova.compute.utils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 705.212940] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.212940] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.260971] env[69982]: DEBUG nova.policy [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c84472005ef43d99658fa6f5cf59bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07f7b975ecb449a290e2ae6582e07016', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 705.330914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.440360] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 705.444661] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7ee5d83b-583f-430a-85a4-779f09b675ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.460573] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 705.460573] env[69982]: value = "task-3864189" [ 705.460573] env[69982]: _type = "Task" [ 705.460573] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.464840] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864185, 'name': PowerOnVM_Task} progress is 64%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.484864] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864189, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.515107] env[69982]: DEBUG oslo_vmware.api [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864177, 'name': PowerOnVM_Task, 'duration_secs': 4.888199} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.515107] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 705.515107] env[69982]: INFO nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Took 16.70 seconds to spawn the instance on the hypervisor. [ 705.515107] env[69982]: DEBUG nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.516195] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585ec31a-3b2d-4725-b847-76131862b434 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.555269] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864188, 'name': CreateVM_Task, 'duration_secs': 0.390957} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.556046] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 705.557637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.557956] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.559353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 705.563728] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b910cf6c-6ed6-417e-9c27-f204fc3da1ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.571276] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 705.571276] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52771389-9af6-f278-e8a9-35d4b11b7953" [ 705.571276] env[69982]: _type = "Task" [ 705.571276] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.581813] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52771389-9af6-f278-e8a9-35d4b11b7953, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.716110] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.722746] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 705.723511] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 705.737888] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 705.818646] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Successfully created port: ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.835819] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0daf64e0-1b90-4c63-a81f-25d84628b7a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.854050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ade33f23-f766-4dc3-9be8-274861a4e389 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.887344] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce85ccf-4c9f-46d0-9977-1b01df2a3d8a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.899337] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a368ef4-8eb2-49a8-b1e3-6ae6ec7ab2ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.916413] env[69982]: DEBUG nova.compute.provider_tree [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.954023] env[69982]: DEBUG nova.network.neutron [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.962801] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864185, 'name': PowerOnVM_Task} progress is 73%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.974469] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864189, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.976652] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "6de35617-22cf-4a32-8651-28ea67532b8f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.977072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.977238] env[69982]: DEBUG nova.compute.manager [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 705.978396] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61740bb6-96c8-40d6-9726-590f8652a810 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.986409] env[69982]: DEBUG nova.compute.manager [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Stopping instance; current vm_state: active, current task_state: powering-off, 
current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 705.987036] env[69982]: DEBUG nova.objects.instance [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lazy-loading 'flavor' on Instance uuid 6de35617-22cf-4a32-8651-28ea67532b8f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 706.041058] env[69982]: INFO nova.compute.manager [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Took 36.34 seconds to build instance. [ 706.083969] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52771389-9af6-f278-e8a9-35d4b11b7953, 'name': SearchDatastore_Task, 'duration_secs': 0.011499} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.084291] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.084522] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 706.084769] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.084983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.085163] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 706.085494] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04a14b94-deea-4a1e-a70c-3fe77b74a44c {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.096830] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 706.096830] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 706.097096] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9bef234-2266-4d66-a413-89798bd1eb29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.103353] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 706.103353] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f1c4b-e756-9c5e-d93a-a0393d435935" [ 706.103353] env[69982]: _type = "Task" [ 706.103353] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.112710] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f1c4b-e756-9c5e-d93a-a0393d435935, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.239603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.240080] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 706.240080] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 706.419863] env[69982]: DEBUG nova.scheduler.client.report [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 706.460330] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.460756] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Instance network_info: |[{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 706.465732] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:34:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b7a73c01-1bb9-4612-a1a7-16d71b732e81', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '6db4163b-49c9-41e1-bfbb-098d83fd379d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.474617] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Creating folder: Project (20ab85756df94226800ca6a415d05d7b). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.475073] env[69982]: DEBUG oslo_vmware.api [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864185, 'name': PowerOnVM_Task, 'duration_secs': 1.316002} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.476076] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a62d1c3-54f2-4ee4-8580-3391a6c8d3c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.481174] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 706.481457] env[69982]: INFO nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Took 10.05 seconds to spawn the instance on the hypervisor. [ 706.481601] env[69982]: DEBUG nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.482643] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caab4b63-2800-44bb-83d3-95ef8ebfa6c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.494065] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864189, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.496550] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Created folder: Project (20ab85756df94226800ca6a415d05d7b) in parent group-v767796. [ 706.496802] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Creating folder: Instances. Parent ref: group-v767860. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.500711] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88e0644f-5034-4b3b-9954-e4cbefe8ef77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.513105] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Created folder: Instances in parent group-v767860. [ 706.513105] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.514114] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.514311] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1b1c967-5f50-4ed2-922d-47294968bc6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.544360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13672cb4-3817-49f5-9339-80d2a85b783e tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.578s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.544360] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.544360] env[69982]: value = "task-3864192" [ 706.544360] env[69982]: _type = "Task" [ 706.544360] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.556605] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864192, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.618218] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f1c4b-e756-9c5e-d93a-a0393d435935, 'name': SearchDatastore_Task, 'duration_secs': 0.02384} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.619542] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e950101-dc9b-4c4e-82fb-a1b3d52f065b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.627141] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 706.627141] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255c1e5-f03e-27db-7e01-c48ab1d5f1ce" [ 706.627141] env[69982]: _type = "Task" [ 706.627141] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.636552] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255c1e5-f03e-27db-7e01-c48ab1d5f1ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.725577] env[69982]: DEBUG nova.compute.manager [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Received event network-changed-3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.725750] env[69982]: DEBUG nova.compute.manager [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Refreshing instance network info cache due to event network-changed-3abe4cd5-150d-42ef-b772-c706ee0b6a33. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 706.725952] env[69982]: DEBUG oslo_concurrency.lockutils [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] Acquiring lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.726112] env[69982]: DEBUG oslo_concurrency.lockutils [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] Acquired lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.726281] env[69982]: DEBUG nova.network.neutron [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Refreshing network info cache for port 3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 706.736558] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 706.739808] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.765362] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.766194] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.766194] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.766194] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.766762] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.766762] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.766834] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.767033] env[69982]: 
DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.767278] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.767504] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.767762] env[69982]: DEBUG nova.virt.hardware [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.768799] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b4ec2-4946-4a77-aa7e-f49acad4be0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.780515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78de282e-9d0a-40b2-9c74-c2a8fba81cc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.907935] env[69982]: DEBUG nova.compute.manager [None req-79c5869c-6de2-49b6-a8cf-99988ed9fb51 tempest-ServerDiagnosticsTest-1778454847 tempest-ServerDiagnosticsTest-1778454847-project-admin] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 706.909438] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a10deab-9f8f-4c19-92e1-a07433c08a37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.918436] env[69982]: INFO nova.compute.manager [None req-79c5869c-6de2-49b6-a8cf-99988ed9fb51 tempest-ServerDiagnosticsTest-1778454847 tempest-ServerDiagnosticsTest-1778454847-project-admin] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Retrieving diagnostics [ 706.919474] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be45c7b1-a11e-4f9b-9239-4f952b9aef17 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.925309] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.962924] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.067s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.965342] env[69982]: INFO nova.compute.claims [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.983051] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.983390] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.988414] env[69982]: INFO nova.scheduler.client.report [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Deleted allocations for instance 5c9b6dc6-887e-477a-b902-135fe06cfbbd [ 706.999694] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864189, 'name': CreateSnapshot_Task, 'duration_secs': 1.162433} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.999995] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 707.001774] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5de9293-a3e4-4a13-a73b-80cb1482ef1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.008807] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 707.012484] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d5ea77f-7935-4928-a569-99432d8e056e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.015328] env[69982]: INFO nova.compute.manager [None req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Took 34.10 seconds to build instance. [ 707.030640] env[69982]: DEBUG oslo_vmware.api [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 707.030640] env[69982]: value = "task-3864193" [ 707.030640] env[69982]: _type = "Task" [ 707.030640] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.045059] env[69982]: DEBUG oslo_vmware.api [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.046850] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 707.063773] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864192, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.138968] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255c1e5-f03e-27db-7e01-c48ab1d5f1ce, 'name': SearchDatastore_Task, 'duration_secs': 0.01356} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.139333] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.139607] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fb6d0f81-0eb1-45aa-a3ad-d3958de582c0/fb6d0f81-0eb1-45aa-a3ad-d3958de582c0.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 707.139920] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bd73123-c53a-4dc8-ae31-f5325cfff96f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.149687] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 707.149687] env[69982]: value = "task-3864194" [ 707.149687] env[69982]: _type = "Task" [ 707.149687] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.161969] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.434422] env[69982]: DEBUG nova.compute.manager [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.434422] env[69982]: DEBUG nova.compute.manager [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing instance network info cache due to event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 707.434422] env[69982]: DEBUG oslo_concurrency.lockutils [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.434422] env[69982]: DEBUG oslo_concurrency.lockutils [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.434422] env[69982]: DEBUG nova.network.neutron [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 707.454106] env[69982]: DEBUG nova.network.neutron [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Updated VIF entry in instance network info cache for port 3abe4cd5-150d-42ef-b772-c706ee0b6a33. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 707.454469] env[69982]: DEBUG nova.network.neutron [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Updating instance_info_cache with network_info: [{"id": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "address": "fa:16:3e:29:d9:c9", "network": {"id": "950bf596-c0f5-4c51-8c15-6976fd97c2f8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-706333667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "100f7b204a534b4abc5ac945fd4c5393", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92233552-2c0c-416e-9bf3-bfcca8eda2dc", "external-id": "nsx-vlan-transportzone-251", "segmentation_id": 251, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3abe4cd5-15", "ovs_interfaceid": "3abe4cd5-150d-42ef-b772-c706ee0b6a33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.497422] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79c5722d-e79b-478d-9d17-96e29025ab82 tempest-ServerExternalEventsTest-1466398643 tempest-ServerExternalEventsTest-1466398643-project-member] Lock "5c9b6dc6-887e-477a-b902-135fe06cfbbd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.242s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.518238] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-1688ecab-b546-4814-978a-3de59be77eac tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.438s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.535801] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 707.536304] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-eab58cd0-85c7-4deb-b1c2-b717967f73b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.554409] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 707.554409] env[69982]: value = "task-3864195" [ 707.554409] env[69982]: _type = "Task" [ 707.554409] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.565140] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864192, 'name': CreateVM_Task, 'duration_secs': 0.564079} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.565465] env[69982]: DEBUG oslo_vmware.api [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864193, 'name': PowerOffVM_Task, 'duration_secs': 0.236479} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.572070] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.572070] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.572070] env[69982]: DEBUG nova.compute.manager [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.574376] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.574549] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.574919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.575806] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003980ba-6ff0-4a6f-9901-c42bfbdcfa87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.580043] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f438796-b69f-4ad9-951d-733c1c23c0b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.595026] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 707.595026] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52267298-78c2-353c-f3f7-03a254226724" [ 707.595026] env[69982]: _type = "Task" [ 707.595026] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.597606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.598533] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864195, 'name': CloneVM_Task} progress is 11%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.614951] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52267298-78c2-353c-f3f7-03a254226724, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.637519] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Successfully updated port: ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.667509] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864194, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.959556] env[69982]: DEBUG oslo_concurrency.lockutils [req-06cbcbae-7d61-4993-a469-bd69e861c4e7 req-8f5f53ad-e9db-47ee-ae12-a82c6f2f4645 service nova] Releasing lock "refresh_cache-fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.022008] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 708.080456] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864195, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.103768] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdbd0863-33e5-4b99-bd6c-a9852661bd99 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.127s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.124977] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52267298-78c2-353c-f3f7-03a254226724, 'name': SearchDatastore_Task, 'duration_secs': 0.082029} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.126308] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.126640] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.126971] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.127622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.127860] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.128620] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b86200ca-b6cf-4c00-8a34-6349e6aa38d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.141777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.141777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.142012] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 708.147029] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.147354] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.151136] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f941646-c160-4ddf-91cd-e0b68143bcbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.163126] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 708.163126] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5261abb5-a61d-f641-530b-9c3ab0002535" [ 708.163126] env[69982]: _type = "Task" [ 708.163126] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.168340] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781462} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.172181] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fb6d0f81-0eb1-45aa-a3ad-d3958de582c0/fb6d0f81-0eb1-45aa-a3ad-d3958de582c0.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.172181] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.175383] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e4a8e2f-42ee-4426-af47-3710fe9843d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.189038] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5261abb5-a61d-f641-530b-9c3ab0002535, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.193024] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 708.193024] env[69982]: value = "task-3864196" [ 708.193024] env[69982]: _type = "Task" [ 708.193024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.207154] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864196, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.290651] env[69982]: DEBUG nova.network.neutron [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updated VIF entry in instance network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 708.294760] env[69982]: DEBUG nova.network.neutron [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.380851] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.380851] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.381086] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.381725] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.381938] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f 
tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.384505] env[69982]: INFO nova.compute.manager [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Terminating instance [ 708.534442] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eee915d-d229-44bc-820a-4f7528bdb80d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.545168] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf810fac-8e4a-4274-a472-0e67ffb83b8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.555622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.596884] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5fb1b3-1ee1-464e-ae0d-cf8de51e7c30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.608758] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864195, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.610485] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cfa751-0432-41b1-b652-5fbc4b73e7b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.626818] env[69982]: DEBUG nova.compute.provider_tree [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.682130] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5261abb5-a61d-f641-530b-9c3ab0002535, 'name': SearchDatastore_Task, 'duration_secs': 0.027715} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.683033] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e86aca7e-a441-4b87-a072-5b45d4e2a78b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.690081] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 708.690081] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d0e14-15af-5e37-7f79-bb2189ac5a7d" [ 708.690081] env[69982]: _type = "Task" [ 708.690081] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.698762] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 708.707814] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d0e14-15af-5e37-7f79-bb2189ac5a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.712496] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864196, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079127} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.712762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 708.713665] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f06e3a-7f11-4e41-a7c0-c7dd0494d6ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.747034] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] fb6d0f81-0eb1-45aa-a3ad-d3958de582c0/fb6d0f81-0eb1-45aa-a3ad-d3958de582c0.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 708.747034] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c46dbbe7-81c3-4db8-bde5-53cc5d9bd7d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.786361] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 708.786361] env[69982]: value = "task-3864197" [ 708.786361] env[69982]: _type = "Task" [ 708.786361] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.795838] env[69982]: DEBUG oslo_concurrency.lockutils [req-9b34630a-9cce-49a5-b754-759fcecc59c1 req-e4dd62ac-f792-421c-a73d-6b8ffa278783 service nova] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.809026] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864197, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.891081] env[69982]: DEBUG nova.compute.manager [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 708.891714] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 708.893023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b5af08-72a2-463d-9dc4-470d9b59eaea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.903771] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 708.904570] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c8e7f2d-0ab1-4c00-9e86-87c9efdf40b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.915598] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 708.915598] env[69982]: value = "task-3864198" [ 708.915598] env[69982]: _type = "Task" [ 708.915598] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.935614] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864198, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.054795] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "9d1b0a5f-e096-4641-a077-f0949135efbb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.055122] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.055349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.055573] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.055788] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.058164] env[69982]: INFO nova.compute.manager [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Terminating instance [ 709.101874] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864195, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.133326] env[69982]: DEBUG nova.scheduler.client.report [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 709.188343] env[69982]: DEBUG nova.network.neutron [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [{"id": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "address": "fa:16:3e:7d:30:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8d3aec-23", "ovs_interfaceid": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.203586] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d0e14-15af-5e37-7f79-bb2189ac5a7d, 'name': SearchDatastore_Task, 'duration_secs': 0.036013} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.203984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.204323] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/ebd9e006-a591-44f7-867c-041731b9d45a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.204671] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a847a04-8e63-46ef-9a67-0b08b2e7bc35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.221024] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 709.221024] env[69982]: value = "task-3864199" [ 709.221024] env[69982]: _type = "Task" [ 709.221024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.235288] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.297780] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864197, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.434075] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864198, 'name': PowerOffVM_Task, 'duration_secs': 0.331836} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.434478] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 709.434726] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 709.435054] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70c7853d-1fb7-46cb-9c82-fd66ff583395 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.505843] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 709.506151] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 709.506904] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Deleting the datastore file [datastore1] 78ba4fa9-4083-4204-a5b4-88cdcec6ca13 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 709.506904] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-35b5729f-c3f6-4143-a536-4594362ceff6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.516795] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for the task: (returnval){ [ 709.516795] env[69982]: value = "task-3864201" [ 709.516795] env[69982]: _type = "Task" [ 709.516795] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.525971] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864201, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.564967] env[69982]: DEBUG nova.compute.manager [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 709.565364] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 709.566412] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e187701-e155-44a8-aaeb-fcf7a3c61dca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.576308] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 709.576724] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a891506-be49-48dd-9281-214235cceb71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.588352] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 709.588352] env[69982]: value = "task-3864202" [ 709.588352] env[69982]: _type = "Task" [ 709.588352] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.602647] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.606556] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864195, 'name': CloneVM_Task, 'duration_secs': 1.984453} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.606869] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Created linked-clone VM from snapshot [ 709.607831] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e03fbf-9bee-43f6-9e19-d5b0bb046560 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.617452] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Uploading image c05a1212-9935-4ef6-b538-e562efbce525 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 709.640487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.677s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.641538] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 709.645769] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.641s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.647756] env[69982]: INFO nova.compute.claims [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.655024] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 709.655024] env[69982]: value = "vm-767864" [ 709.655024] env[69982]: _type = "VirtualMachine" [ 709.655024] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 709.655024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9956eb66-9918-4984-ab88-f6d7bdf4e2cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.662836] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lease: (returnval){ [ 709.662836] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52064639-f746-bd6a-e0db-158ffd53b1a6" [ 709.662836] env[69982]: _type = "HttpNfcLease" [ 709.662836] env[69982]: } obtained for exporting VM: (result){ [ 709.662836] env[69982]: value = "vm-767864" [ 709.662836] env[69982]: _type = "VirtualMachine" [ 709.662836] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 709.663139] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the lease: (returnval){ [ 709.663139] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52064639-f746-bd6a-e0db-158ffd53b1a6" [ 709.663139] env[69982]: _type = "HttpNfcLease" [ 709.663139] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 709.673097] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 709.673097] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52064639-f746-bd6a-e0db-158ffd53b1a6" [ 709.673097] env[69982]: _type = "HttpNfcLease" [ 709.673097] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 709.691812] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.692207] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Instance network_info: |[{"id": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "address": "fa:16:3e:7d:30:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8d3aec-23", "ovs_interfaceid": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 709.692643] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:30:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff8d3aec-2392-4a4a-80c2-aa0499153235', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 709.708083] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating folder: Project (07f7b975ecb449a290e2ae6582e07016). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.709335] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c9d408f-c6f4-4733-85e2-93fb4b65f94f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.726032] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created folder: Project (07f7b975ecb449a290e2ae6582e07016) in parent group-v767796. [ 709.726032] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating folder: Instances. Parent ref: group-v767865. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 709.731035] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-202d3c85-54a2-448b-99db-5d13dbae14a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.740705] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864199, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.746870] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created folder: Instances in parent group-v767865. [ 709.747255] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 709.747568] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 709.747844] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ebabc28-a61c-4702-856d-fc77ec48809e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.773642] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 709.773642] env[69982]: value = "task-3864206" [ 709.773642] env[69982]: _type = "Task" [ 709.773642] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.784793] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864206, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.798206] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864197, 'name': ReconfigVM_Task, 'duration_secs': 0.540273} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.798452] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Reconfigured VM instance instance-00000015 to attach disk [datastore2] fb6d0f81-0eb1-45aa-a3ad-d3958de582c0/fb6d0f81-0eb1-45aa-a3ad-d3958de582c0.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 709.799168] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ba36324-f7d8-45e3-9f47-f55b2094fb99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.812227] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 709.812227] env[69982]: value = "task-3864207" [ 709.812227] env[69982]: _type = "Task" [ 709.812227] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.823359] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864207, 'name': Rename_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.988347] env[69982]: DEBUG nova.compute.manager [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received event network-vif-plugged-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.988548] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Acquiring lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.988695] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.988877] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.989108] env[69982]: DEBUG nova.compute.manager [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] No waiting events found dispatching network-vif-plugged-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 709.989308] env[69982]: WARNING nova.compute.manager [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received unexpected event network-vif-plugged-ff8d3aec-2392-4a4a-80c2-aa0499153235 for instance with vm_state building and task_state spawning. [ 709.989509] env[69982]: DEBUG nova.compute.manager [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.989762] env[69982]: DEBUG nova.compute.manager [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing instance network info cache due to event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 709.989882] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Acquiring lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.990048] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Acquired lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.990315] env[69982]: DEBUG nova.network.neutron [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 710.033163] env[69982]: DEBUG oslo_vmware.api [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Task: {'id': task-3864201, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337821} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.033444] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.033721] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.035217] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.035500] env[69982]: INFO nova.compute.manager [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Took 1.14 seconds to destroy the instance on the hypervisor. [ 710.035839] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.036138] env[69982]: DEBUG nova.compute.manager [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 710.036272] env[69982]: DEBUG nova.network.neutron [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 710.100318] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864202, 'name': PowerOffVM_Task, 'duration_secs': 0.310317} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.100611] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 710.100862] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 710.101050] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8ca77ee-f912-44a2-a896-6e85b3ab12f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.153991] env[69982]: DEBUG nova.compute.utils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 710.159072] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 710.159072] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 710.177753] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 710.177886] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 710.178026] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Deleting the datastore file [datastore1] 9d1b0a5f-e096-4641-a077-f0949135efbb {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 710.179951] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96b13959-1af8-40b0-9e0d-95643a9fa9c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.184017] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 710.184017] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52064639-f746-bd6a-e0db-158ffd53b1a6" [ 710.184017] env[69982]: _type = "HttpNfcLease" [ 710.184017] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 710.184716] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 710.184716] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52064639-f746-bd6a-e0db-158ffd53b1a6" [ 710.184716] env[69982]: _type = "HttpNfcLease" [ 710.184716] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 710.185872] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2289fe46-ecc5-4a10-8351-bf20f67ba727 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.191261] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for the task: (returnval){ [ 710.191261] env[69982]: value = "task-3864209" [ 710.191261] env[69982]: _type = "Task" [ 710.191261] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.198982] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 710.199467] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 710.207968] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864209, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.268199] env[69982]: DEBUG nova.policy [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c13b5c88a88649db908ebe8a5af37aea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'db52cf2370234a5296423eceeaf74485', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 710.282457] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744306} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.283591] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/ebd9e006-a591-44f7-867c-041731b9d45a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.283591] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.283880] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d16e9e9-783c-4360-b570-717037ac63bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.289520] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864206, 'name': CreateVM_Task, 'duration_secs': 0.466386} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.289825] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 710.290543] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.290810] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.291024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 710.291660] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1efd41a3-055f-4b7c-944c-874d7be13fb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.294706] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 710.294706] env[69982]: value 
= "task-3864210" [ 710.294706] env[69982]: _type = "Task" [ 710.294706] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.299824] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 710.299824] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5203010d-9ea0-ab16-a583-536f02a8c805" [ 710.299824] env[69982]: _type = "Task" [ 710.299824] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.309909] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.320381] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5203010d-9ea0-ab16-a583-536f02a8c805, 'name': SearchDatastore_Task, 'duration_secs': 0.015672} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.321243] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.321548] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 710.321838] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.322044] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.322544] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 710.322649] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d8fb0759-d884-4eeb-900a-04088f48c718 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.328073] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864207, 'name': Rename_Task, 'duration_secs': 0.263991} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.331120] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.331812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06a86d5e-e3bd-4d3e-acd3-cd6e5bf99d96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.335399] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0ff3172c-b879-4438-842b-d3475d317490 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.341704] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 710.341704] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 710.342794] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daa4e799-975f-450d-9a9d-5110dc8b9a0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.346996] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 710.346996] env[69982]: value = "task-3864211" [ 710.346996] env[69982]: _type = "Task" [ 710.346996] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.353325] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 710.353325] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229d88b-ba4e-fe7b-436c-448020bb5e5c" [ 710.353325] env[69982]: _type = "Task" [ 710.353325] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.365283] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864211, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.383647] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229d88b-ba4e-fe7b-436c-448020bb5e5c, 'name': SearchDatastore_Task, 'duration_secs': 0.015879} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.384631] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79b090d7-861b-42f6-ab79-ef5aa488a621 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.399873] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 710.399873] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be9f18-47fa-6bcb-9201-fd4d230c743c" [ 710.399873] env[69982]: _type = "Task" [ 710.399873] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.409976] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be9f18-47fa-6bcb-9201-fd4d230c743c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.662025] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 710.711341] env[69982]: DEBUG oslo_vmware.api [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Task: {'id': task-3864209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213461} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.718023] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 710.718023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 710.718023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 710.718023] env[69982]: INFO nova.compute.manager [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 710.718023] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 710.718557] env[69982]: DEBUG nova.compute.manager [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 710.718557] env[69982]: DEBUG nova.network.neutron [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 710.747817] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Successfully created port: c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.806895] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080786} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.810828] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.812721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4323b20-1a37-4ec3-aa14-066ff14b379c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.851881] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/ebd9e006-a591-44f7-867c-041731b9d45a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.858050] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fc8df82-a138-45e5-9262-39dbf0090375 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.880506] env[69982]: DEBUG nova.network.neutron [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.889777] env[69982]: DEBUG oslo_vmware.api [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864211, 'name': PowerOnVM_Task, 'duration_secs': 0.543612} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.891442] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 710.893012] env[69982]: INFO nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Took 9.61 seconds to spawn the instance on the hypervisor. 
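The entries around this point all follow the same pattern against vCenter: a worker invokes a VMware API method that returns a Task (CreateVM_Task, Rename_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task), then blocks on oslo.vmware's task polling ("Waiting for the task ... progress is N% ... completed successfully"), while shared state such as the image cache path and the per-instance "refresh_cache-<uuid>" network-info cache is guarded with oslo.concurrency locks. The following is a minimal illustrative sketch of that pattern only, not the actual Nova code: the VMwareAPISession constructor arguments are assumptions that may differ by oslo.vmware release, and the host, credentials, vm_ref, and helper names are placeholders.

# Illustrative sketch of the invoke-then-wait and lock-guard patterns seen in
# the surrounding log. Placeholders: host, credentials, vm_ref, refresh().
# Constructor argument names are assumed and may vary across oslo.vmware
# releases.
from oslo_concurrency import lockutils
from oslo_vmware import api as vmware_api

def make_session():
    # Establishes a vCenter session; corresponds to the
    # "VMwareAPISession._create_session" lock/login lines earlier in the log.
    return vmware_api.VMwareAPISession(
        'vc.example.org',          # vCenter host (placeholder)
        'user', 'password',        # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)

def power_on(session, vm_ref):
    # Start a vCenter task and poll it to completion: the
    # "Invoking VirtualMachine.PowerOnVM_Task ... Waiting for the task ...
    # progress is N%" sequence in the log.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task_ref)

def refresh_network_cache(instance_uuid, refresh):
    # The "Acquiring/Acquired/Releasing lock refresh_cache-<uuid>" lines come
    # from a guard of this shape around the instance network-info cache.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh(instance_uuid)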
[ 710.893012] env[69982]: DEBUG nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 710.893012] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 710.893012] env[69982]: value = "task-3864212" [ 710.893012] env[69982]: _type = "Task" [ 710.893012] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.894050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddca69c6-0297-4a28-aaca-054d69ab69cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.936846] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.944790] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be9f18-47fa-6bcb-9201-fd4d230c743c, 'name': SearchDatastore_Task, 'duration_secs': 0.017265} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.944790] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.944790] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 31f56d0e-7c64-4fe3-917e-7ebb814ae924/31f56d0e-7c64-4fe3-917e-7ebb814ae924.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.944790] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39031518-5e88-4665-9f51-7fdf88a111c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.953249] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 710.953249] env[69982]: value = "task-3864213" [ 710.953249] env[69982]: _type = "Task" [ 710.953249] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.963142] env[69982]: DEBUG nova.network.neutron [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updated VIF entry in instance network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 710.963142] env[69982]: DEBUG nova.network.neutron [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [{"id": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "address": "fa:16:3e:7d:30:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8d3aec-23", "ovs_interfaceid": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 710.972256] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864213, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.171058] env[69982]: INFO nova.virt.block_device [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Booting with volume 7296ca0e-ce02-4177-9218-289c56e21e9a at /dev/sda [ 711.244664] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8e14ed6b-c74e-43bd-814b-c4678f04e31d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.260721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e34d05d-aeb7-4616-ac85-ca1e2c7039e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.307623] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5729a5f7-7c1d-4890-bfb2-60fef8f45a49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.319022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd84339-b516-4520-831f-36343a34ace0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.370763] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2513a82c-e5d3-4c26-af20-d26e84d26eff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.379533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747bc513-6f94-46de-9fff-bc6f9fee0968 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.385065] env[69982]: INFO nova.compute.manager [-] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Took 1.35 seconds to deallocate network for instance. [ 711.396813] env[69982]: DEBUG nova.virt.block_device [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updating existing volume attachment record: d29120f5-badb-4964-a15a-5ac9ce7283e1 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 711.416320] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864212, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.425353] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970d0247-e221-4532-b853-73e4234918ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.435149] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea47997-ee5c-4702-9d08-c3dbd18e2c90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.487012] env[69982]: INFO nova.compute.manager [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Took 35.58 seconds to build instance. [ 711.488725] env[69982]: DEBUG oslo_concurrency.lockutils [req-3a5bdfda-ac46-443d-ae00-b64fd2a3f06e req-d06af5e1-c713-4f23-8842-c18559652e1f service nova] Releasing lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 711.494840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ba3209-7cbd-4eb3-9334-798ff339b521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.509794] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864213, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.514434] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f867088-3489-4ccf-929a-42b15776d67c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.535541] env[69982]: DEBUG nova.objects.instance [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lazy-loading 'flavor' on Instance uuid 6de35617-22cf-4a32-8651-28ea67532b8f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 711.538620] env[69982]: DEBUG nova.compute.provider_tree [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 711.842276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "48162423-a117-437e-b171-9a40c7c6f49b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.842617] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.892980] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.912905] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864212, 'name': ReconfigVM_Task, 'duration_secs': 0.614426} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.913340] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Reconfigured VM instance instance-00000016 to attach disk [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/ebd9e006-a591-44f7-867c-041731b9d45a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.914680] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e54090c7-c8eb-41c6-b328-9dd0265e80a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.922066] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 711.922066] env[69982]: value = "task-3864214" [ 711.922066] env[69982]: _type = "Task" [ 711.922066] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.934979] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864214, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.989464] env[69982]: DEBUG oslo_concurrency.lockutils [None req-82a77c35-8e27-405f-b7c3-f38a09263a4e tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.132s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.003459] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.725989} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.004096] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 31f56d0e-7c64-4fe3-917e-7ebb814ae924/31f56d0e-7c64-4fe3-917e-7ebb814ae924.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 712.004543] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 712.007087] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47b911db-d324-42ae-8f65-676026420f31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.023036] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 712.023036] env[69982]: value = "task-3864215" [ 712.023036] env[69982]: _type = "Task" [ 712.023036] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.036169] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.044132] env[69982]: DEBUG nova.scheduler.client.report [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 712.048301] env[69982]: DEBUG nova.network.neutron [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.049753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.049753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquired lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.050109] env[69982]: DEBUG nova.network.neutron [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 712.050387] env[69982]: DEBUG nova.objects.instance [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lazy-loading 'info_cache' on Instance uuid 6de35617-22cf-4a32-8651-28ea67532b8f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 712.096117] env[69982]: DEBUG nova.compute.manager [req-7e34c6af-0f3a-47a4-bb81-507048518cfe req-d83bf1c9-a2e2-40d6-ab0a-3a3ebb8bbaa2 service nova] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Received event network-vif-deleted-99cd9e66-3189-4fac-9e08-6cf8f661693e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.434966] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864214, 'name': Rename_Task, 'duration_secs': 0.251208} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.434966] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.435389] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e3d65a5a-5650-4c03-aa7d-b44df5205e74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.444032] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 712.444032] env[69982]: value = "task-3864216" [ 712.444032] env[69982]: _type = "Task" [ 712.444032] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.453444] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.455640] env[69982]: DEBUG nova.compute.manager [req-b0101fef-a7e3-4fb6-998a-73914c691cf4 req-541fd6d7-ea41-42cc-8a9a-512d392bb133 service nova] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Received event network-vif-deleted-f12b494d-8265-4d7f-82a2-f2ec5ed91285 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.496513] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 712.533719] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081978} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.534054] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 712.535022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080159ad-7b2e-4c3b-8406-28aa711b4a5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.552338] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.906s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.552724] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 712.564403] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 31f56d0e-7c64-4fe3-917e-7ebb814ae924/31f56d0e-7c64-4fe3-917e-7ebb814ae924.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 712.565840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.811s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.567733] env[69982]: INFO nova.compute.claims [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 712.570778] env[69982]: INFO nova.compute.manager [-] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Took 1.85 seconds to deallocate network for instance. 
[ 712.572035] env[69982]: DEBUG nova.objects.base [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Object Instance<6de35617-22cf-4a32-8651-28ea67532b8f> lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 712.573447] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-032017a3-be45-429a-8cb3-565acf906e3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.601062] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 712.601062] env[69982]: value = "task-3864217" [ 712.601062] env[69982]: _type = "Task" [ 712.601062] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.611457] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864217, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.955113] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864216, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.017345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.066526] env[69982]: DEBUG nova.compute.utils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 713.069244] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 713.069244] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 713.071880] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Successfully updated port: c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.102193] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.117427] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864217, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.173018] env[69982]: DEBUG nova.policy [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99858c5fbda7454cab0188cf368e51f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83b53a0998874810b5302415624592cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 713.371754] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "d73153ad-9258-4c3c-9699-b6364408d631" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.371975] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.431505] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock 
"fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.431799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.432040] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.432228] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.432395] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.434615] env[69982]: INFO nova.compute.manager [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Terminating instance [ 713.454980] env[69982]: DEBUG oslo_vmware.api [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864216, 'name': PowerOnVM_Task, 'duration_secs': 0.781506} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.455964] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.456144] env[69982]: INFO nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Took 9.45 seconds to spawn the instance on the hypervisor. 
[ 713.456322] env[69982]: DEBUG nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.457217] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985a143c-6d9a-4d8e-9f2e-dd58834b3d3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.558031] env[69982]: DEBUG nova.network.neutron [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updating instance_info_cache with network_info: [{"id": "2819d991-2637-403b-b564-70e27a846f61", "address": "fa:16:3e:dd:be:99", "network": {"id": "bc307ef7-1c51-4c26-a728-ddad07a1be96", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1183462202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e788d70221a4c9c834bd785b689b7f6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37e8d2ee-abfc-42e2-a8fa-ee5447f1f1da", "external-id": "nsx-vlan-transportzone-813", "segmentation_id": 813, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2819d991-26", "ovs_interfaceid": "2819d991-2637-403b-b564-70e27a846f61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.574656] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 713.577665] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.577836] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquired lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.577997] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 713.608585] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 713.608691] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 713.609644] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.609644] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 713.609644] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.609644] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 
tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 713.610031] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 713.610031] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 713.610394] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 713.611026] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 713.611026] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 713.611149] env[69982]: DEBUG nova.virt.hardware [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 713.620480] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01940864-7999-45e7-b823-17999a5cd948 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.413407] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Successfully created port: b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.419796] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.420106] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.420370] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.420596] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.420787] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.422931] env[69982]: DEBUG nova.compute.manager [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 714.423147] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.431029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Releasing lock "refresh_cache-6de35617-22cf-4a32-8651-28ea67532b8f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.449632] env[69982]: INFO nova.compute.manager [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Terminating instance [ 714.449805] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c27bd5-10f7-4827-ae11-13f6aec7f81f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.455021] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864217, 'name': ReconfigVM_Task, 'duration_secs': 0.524781} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.457594] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 31f56d0e-7c64-4fe3-917e-7ebb814ae924/31f56d0e-7c64-4fe3-917e-7ebb814ae924.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 714.462920] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8f83a4a-e858-49f0-b137-c3026cfa445f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.470583] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a19e659-a161-40df-b446-62950d6bcfa9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.476435] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.477914] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4943ad5-2580-4b16-8466-3dc26cdd8e67 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.488825] env[69982]: DEBUG oslo_vmware.api [None 
req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 714.488825] env[69982]: value = "task-3864218" [ 714.488825] env[69982]: _type = "Task" [ 714.488825] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.507680] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 714.507680] env[69982]: value = "task-3864219" [ 714.507680] env[69982]: _type = "Task" [ 714.507680] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.520112] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864218, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.523310] env[69982]: INFO nova.compute.manager [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Took 35.61 seconds to build instance. [ 714.528921] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864219, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.552232] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 714.593087] env[69982]: DEBUG nova.compute.manager [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Received event network-vif-plugged-c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.593377] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Acquiring lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.594425] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.594425] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.594425] env[69982]: DEBUG nova.compute.manager [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] No waiting events found dispatching network-vif-plugged-c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 714.594425] env[69982]: WARNING nova.compute.manager [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Received unexpected event network-vif-plugged-c81278f4-6f59-40a5-aada-68e9141c3415 for instance with vm_state building and task_state spawning. [ 714.594425] env[69982]: DEBUG nova.compute.manager [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Received event network-changed-c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.594847] env[69982]: DEBUG nova.compute.manager [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Refreshing instance network info cache due to event network-changed-c81278f4-6f59-40a5-aada-68e9141c3415. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 714.594847] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Acquiring lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.848390] env[69982]: DEBUG nova.network.neutron [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updating instance_info_cache with network_info: [{"id": "c81278f4-6f59-40a5-aada-68e9141c3415", "address": "fa:16:3e:cd:b0:79", "network": {"id": "390ad4c7-41f1-4907-8f9c-355e8f47fc4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1913038845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db52cf2370234a5296423eceeaf74485", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81278f4-6f", "ovs_interfaceid": "c81278f4-6f59-40a5-aada-68e9141c3415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.971178] env[69982]: DEBUG nova.compute.manager [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 714.971555] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.973557] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a942517-dfa8-4ae6-8316-043a12da1ef5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.985719] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.989774] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b18d04b-7f05-4cdc-bacc-1552cb148f8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.002464] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 715.002464] env[69982]: value = "task-3864220" [ 715.002464] env[69982]: _type = "Task" [ 715.002464] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.015374] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.019221] env[69982]: INFO nova.compute.manager [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Rescuing [ 715.019666] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.019962] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.020264] env[69982]: DEBUG nova.network.neutron [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 715.041076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e8f132cc-7d01-47b9-8fa0-f42334aafa2c tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.441s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.041509] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864218, 'name': Rename_Task, 'duration_secs': 0.229528} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.049466] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.059111] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f648186-54fa-4917-92ba-b091afb6e0b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.063690] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864220, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.074566] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864219, 'name': PowerOffVM_Task, 'duration_secs': 0.359705} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.080129] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 715.080129] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.080129] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 715.080323] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.080323] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 715.080323] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 715.080323] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 715.080478] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 715.080790] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 715.081034] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 715.081327] env[69982]: DEBUG nova.virt.hardware [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 715.082804] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 715.083137] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.085027] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5b876c-9e13-4f61-bb33-e4506951591b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.090985] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bfb1fd67-91b5-4517-903a-13c5b6725e8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.094219] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 715.094219] env[69982]: value = "task-3864221" [ 715.094219] env[69982]: _type = "Task" [ 715.094219] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.097554] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3408557d-bc4c-4647-956c-80150f2f6ac7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.114294] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1b03cb-c02e-43bf-8a04-ad271c4b0335 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.129978] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5f817c-9307-4aa9-a265-576d38a2e501 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.138934] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.196613] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2acbe42-7db8-4695-abcd-fa8929794f02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.199700] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.199937] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.200120] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Deleting the datastore file [datastore2] fb6d0f81-0eb1-45aa-a3ad-d3958de582c0 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.200434] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb4e7316-52c5-4207-996e-a33ad78b6fe9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.216226] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16a760e-a9c0-449d-a262-6765415e7e75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.220664] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for the task: (returnval){ [ 715.220664] env[69982]: value = "task-3864223" [ 715.220664] 
env[69982]: _type = "Task" [ 715.220664] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.236069] env[69982]: DEBUG nova.compute.provider_tree [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.243865] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.355320] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Releasing lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.356101] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Instance network_info: |[{"id": "c81278f4-6f59-40a5-aada-68e9141c3415", "address": "fa:16:3e:cd:b0:79", "network": {"id": "390ad4c7-41f1-4907-8f9c-355e8f47fc4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1913038845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db52cf2370234a5296423eceeaf74485", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81278f4-6f", "ovs_interfaceid": "c81278f4-6f59-40a5-aada-68e9141c3415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 715.356450] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Acquired lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.358168] env[69982]: DEBUG nova.network.neutron [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Refreshing network info cache for port c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} 
[ 715.360029] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:b0:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c81278f4-6f59-40a5-aada-68e9141c3415', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.374539] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Creating folder: Project (db52cf2370234a5296423eceeaf74485). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.380426] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-879b2f3d-a83b-4e3d-8912-9a653eaad79e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.398308] env[69982]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 715.398391] env[69982]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69982) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 715.398911] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Folder already exists: Project (db52cf2370234a5296423eceeaf74485). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 715.399235] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Creating folder: Instances. Parent ref: group-v767824. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 715.400443] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61f91770-143b-486b-ae3b-295c53280ce7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.415178] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Created folder: Instances in parent group-v767824. [ 715.415614] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.415964] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.416306] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71a8ff51-aaa6-456d-8178-7d1970a5c959 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.452024] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.452024] env[69982]: value = "task-3864226" [ 715.452024] env[69982]: _type = "Task" [ 715.452024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.454064] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 715.454407] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f52d52a5-98af-42cb-8459-a4d12a4848e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.463875] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864226, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.465440] env[69982]: DEBUG oslo_vmware.api [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 715.465440] env[69982]: value = "task-3864227" [ 715.465440] env[69982]: _type = "Task" [ 715.465440] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.475114] env[69982]: DEBUG oslo_vmware.api [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.523411] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864220, 'name': PowerOffVM_Task, 'duration_secs': 0.286375} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.523871] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 715.524112] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.524421] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-117c8144-ec63-404c-9268-fa0cfb28b1a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.551463] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.609924] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.740833] env[69982]: DEBUG nova.scheduler.client.report [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 715.746549] env[69982]: DEBUG oslo_vmware.api [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Task: {'id': task-3864223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374125} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.749897] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 715.750326] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 715.750493] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.750790] env[69982]: INFO nova.compute.manager [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Took 1.33 seconds to destroy the instance on the hypervisor. [ 715.754378] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.756957] env[69982]: DEBUG nova.compute.manager [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 715.756957] env[69982]: DEBUG nova.network.neutron [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 715.869679] env[69982]: DEBUG nova.network.neutron [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updated VIF entry in instance network info cache for port c81278f4-6f59-40a5-aada-68e9141c3415. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 715.869679] env[69982]: DEBUG nova.network.neutron [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updating instance_info_cache with network_info: [{"id": "c81278f4-6f59-40a5-aada-68e9141c3415", "address": "fa:16:3e:cd:b0:79", "network": {"id": "390ad4c7-41f1-4907-8f9c-355e8f47fc4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1913038845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db52cf2370234a5296423eceeaf74485", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81278f4-6f", "ovs_interfaceid": "c81278f4-6f59-40a5-aada-68e9141c3415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.964316] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864226, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.977360] env[69982]: DEBUG oslo_vmware.api [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864227, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.041856] env[69982]: DEBUG nova.network.neutron [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.087339] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.109689] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.256167] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.690s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.256999] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 716.261652] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.670s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.263897] env[69982]: INFO nova.compute.claims [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 716.372857] env[69982]: DEBUG oslo_concurrency.lockutils [req-a5cc781f-80ee-495b-bb39-32f267542252 req-4e6a4645-e8a2-4722-ae35-2c501f0dc13a service nova] Releasing lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.449989] env[69982]: DEBUG nova.compute.manager [req-34f52c82-7f5c-42cd-8eb3-7e2995803461 req-bfdffb8d-77ee-4768-afa0-4fd137df739c service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Received event network-vif-deleted-3abe4cd5-150d-42ef-b772-c706ee0b6a33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.450659] env[69982]: INFO nova.compute.manager [req-34f52c82-7f5c-42cd-8eb3-7e2995803461 req-bfdffb8d-77ee-4768-afa0-4fd137df739c service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Neutron deleted interface 3abe4cd5-150d-42ef-b772-c706ee0b6a33; detaching it from the instance and deleting it from the info cache [ 716.450659] env[69982]: DEBUG nova.network.neutron [req-34f52c82-7f5c-42cd-8eb3-7e2995803461 req-bfdffb8d-77ee-4768-afa0-4fd137df739c service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.458710] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Successfully updated port: b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.473451] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864226, 'name': CreateVM_Task, 'duration_secs': 0.525937} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.478650] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.479500] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767837', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'name': 'volume-7296ca0e-ce02-4177-9218-289c56e21e9a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f59d53-c61b-48ad-b55d-710595e9dae3', 'attached_at': '', 'detached_at': '', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'serial': '7296ca0e-ce02-4177-9218-289c56e21e9a'}, 'guest_format': None, 'attachment_id': 'd29120f5-badb-4964-a15a-5ac9ce7283e1', 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69982) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 716.479762] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Root volume attach. Driver type: vmdk {{(pid=69982) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 716.481319] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de229fea-831f-4a42-a3c1-dd00ff0471c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.489876] env[69982]: DEBUG oslo_vmware.api [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864227, 'name': PowerOnVM_Task, 'duration_secs': 0.544116} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.490842] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 716.490992] env[69982]: DEBUG nova.compute.manager [None req-a0223c61-6955-4553-bf3e-5afb9aa1aa31 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 716.491733] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0c7dba-38d3-4836-ba07-33a896d5becc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.500859] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa58ebc7-9192-4627-b720-093385c63f6e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.516829] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096f0800-dec4-45ff-b11b-30937d0ed76b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.526552] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c8102260-bb1d-4bd9-bbec-bd3d774618f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.536222] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 716.536222] env[69982]: value = "task-3864229" [ 716.536222] env[69982]: _type = "Task" [ 716.536222] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.544367] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.550272] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864229, 'name': RelocateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.613778] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.723824] env[69982]: DEBUG nova.network.neutron [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.764822] env[69982]: DEBUG nova.compute.manager [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Received event network-vif-plugged-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.765270] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Acquiring lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.765631] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.765909] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.766206] env[69982]: DEBUG nova.compute.manager [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] No waiting events found dispatching network-vif-plugged-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.767061] env[69982]: WARNING nova.compute.manager [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Received unexpected event network-vif-plugged-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c for instance with vm_state building and task_state spawning. [ 716.767152] env[69982]: DEBUG nova.compute.manager [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Received event network-changed-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.767444] env[69982]: DEBUG nova.compute.manager [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Refreshing instance network info cache due to event network-changed-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 716.768258] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Acquiring lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.768369] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Acquired lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.768622] env[69982]: DEBUG nova.network.neutron [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Refreshing network info cache for port b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 716.775325] env[69982]: DEBUG nova.compute.utils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 716.778687] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 716.778687] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 716.875679] env[69982]: DEBUG nova.policy [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd74729efe5214a7ebd53b512d7a1208c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60ce3af012c140a08fb2b992dacd15aa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 716.954788] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ba06714-f322-4609-8617-20d87a76bb42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.970176] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.984524] 
env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fcab93-d0a3-48d6-a8b2-132cfaf864b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.017413] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 717.017820] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 717.018114] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Deleting the datastore file [datastore1] 6efb0df5-0435-424a-b4cc-1eaefdcf388d {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 717.023416] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64b8aef4-106f-481c-8785-ff9b475a1574 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.057281] env[69982]: DEBUG nova.compute.manager [req-34f52c82-7f5c-42cd-8eb3-7e2995803461 req-bfdffb8d-77ee-4768-afa0-4fd137df739c service nova] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Detach interface failed, port_id=3abe4cd5-150d-42ef-b772-c706ee0b6a33, reason: Instance fb6d0f81-0eb1-45aa-a3ad-d3958de582c0 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 717.062807] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for the task: (returnval){ [ 717.062807] env[69982]: value = "task-3864230" [ 717.062807] env[69982]: _type = "Task" [ 717.062807] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.072631] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864229, 'name': RelocateVM_Task} progress is 20%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.079143] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.112715] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.232073] env[69982]: INFO nova.compute.manager [-] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Took 1.48 seconds to deallocate network for instance. [ 717.284869] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.369644] env[69982]: DEBUG nova.network.neutron [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 717.410160] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Successfully created port: ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 717.582372] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864229, 'name': RelocateVM_Task, 'duration_secs': 0.60327} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.583427] env[69982]: DEBUG nova.network.neutron [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.587830] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 717.588193] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767837', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'name': 'volume-7296ca0e-ce02-4177-9218-289c56e21e9a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f59d53-c61b-48ad-b55d-710595e9dae3', 'attached_at': '', 'detached_at': '', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'serial': '7296ca0e-ce02-4177-9218-289c56e21e9a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 717.591095] env[69982]: DEBUG oslo_vmware.api [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Task: {'id': task-3864230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28121} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.594991] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab54fde-52a4-4538-92ad-929cd85b359a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.598198] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.598317] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.598508] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.598680] env[69982]: INFO nova.compute.manager [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Took 2.63 seconds to destroy the instance on the hypervisor. [ 717.598980] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.601332] env[69982]: DEBUG nova.compute.manager [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 717.601454] env[69982]: DEBUG nova.network.neutron [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 717.630471] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06c7a68-4077-428e-adb2-cd7485e741cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.638153] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.661722] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] volume-7296ca0e-ce02-4177-9218-289c56e21e9a/volume-7296ca0e-ce02-4177-9218-289c56e21e9a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 717.666710] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b636319-097d-4087-bca0-aad28254626f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.689166] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 717.689166] env[69982]: value = "task-3864231" [ 717.689166] env[69982]: _type = "Task" [ 717.689166] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.699657] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.743175] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.939540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d6895f-f6f0-4e1c-afc0-9e99778016a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.949684] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6095a1-53f9-462f-867e-0f2f30c0bdad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.995794] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9a6df1-fa3b-42ff-b396-99cc512dd161 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.006169] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee66389f-2baf-48a3-8057-989daf76967d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.026786] env[69982]: DEBUG nova.compute.provider_tree [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.095085] env[69982]: DEBUG oslo_concurrency.lockutils [req-25b7657d-07c3-43bb-9fe0-cf3fbd9c38df req-0fad0e04-2c54-404b-985a-12aff9c77498 service nova] Releasing lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.095488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.095658] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 718.105261] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 718.105706] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36444322-7f35-4218-80bf-abd91886a3a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.120623] env[69982]: DEBUG oslo_vmware.api [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864221, 'name': PowerOnVM_Task, 'duration_secs': 2.695223} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.122267] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 718.122780] env[69982]: INFO nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Took 11.39 seconds to spawn the instance on the hypervisor. [ 718.123073] env[69982]: DEBUG nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.123509] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 718.123509] env[69982]: value = "task-3864232" [ 718.123509] env[69982]: _type = "Task" [ 718.123509] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.124295] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13838052-8bd2-44d4-981b-b636a347c3aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.138376] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864232, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.205781] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864231, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.301136] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.338887] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 718.339790] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.339998] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 718.340288] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.340406] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 718.340541] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 718.340783] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 718.341029] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 718.341219] env[69982]: DEBUG 
nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 718.341392] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 718.341649] env[69982]: DEBUG nova.virt.hardware [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 718.342674] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179965a5-1f9d-48f8-973b-6edfade547dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.355034] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de69ca4f-cd3e-42e6-98cd-38ebf7e378a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.532809] env[69982]: DEBUG nova.scheduler.client.report [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.638687] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864232, 'name': PowerOffVM_Task, 'duration_secs': 0.259086} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.639065] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 718.640350] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53efc7ef-244d-40bb-8bf1-f1eb4ef04c2e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.668276] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 718.671856] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b717836-f734-4c15-8cb6-ff42aaab9c04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.675588] env[69982]: INFO nova.compute.manager [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Took 35.87 seconds to build instance. [ 718.701722] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864231, 'name': ReconfigVM_Task, 'duration_secs': 0.540505} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.704986] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Reconfigured VM instance instance-00000018 to attach disk [datastore1] volume-7296ca0e-ce02-4177-9218-289c56e21e9a/volume-7296ca0e-ce02-4177-9218-289c56e21e9a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 718.714666] env[69982]: DEBUG nova.network.neutron [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.715997] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d747533-f18e-4c4a-be92-4508dd3e45f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.729592] env[69982]: INFO nova.compute.manager [-] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Took 1.13 seconds to deallocate network for instance. 
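The entries above show the vmwareapi driver's basic call pattern: it kicks off an asynchronous vCenter task (PowerOffVM_Task, ReconfigVM_Task) and then blocks in oslo_vmware's wait_for_task poller until the task reports success. A minimal sketch of that invoke-then-wait pattern follows; the endpoint, credentials and vm_ref are placeholders rather than values from this run, and the exact VMwareAPISession constructor arguments may differ between releases.

# Sketch only: placeholder endpoint/credentials; vm_ref is an illustrative managed object ref.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',          # placeholders, not this deployment
    api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # Start the asynchronous vSphere task ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... then poll it to completion, which is what produces the
    # "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is N%" lines above.
    session.wait_for_task(task)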
[ 718.737388] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 718.737702] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a70fafd-c919-4ca0-b8f3-13c1b3fcc52e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.741453] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 718.741453] env[69982]: value = "task-3864233" [ 718.741453] env[69982]: _type = "Task" [ 718.741453] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.747841] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 718.747841] env[69982]: value = "task-3864234" [ 718.747841] env[69982]: _type = "Task" [ 718.747841] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.755403] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864233, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.766292] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 718.766292] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.766292] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.766292] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.766814] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.766814] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71b8ff8a-14dc-4d68-8aa2-06f97d70bd94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.777323] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.777484] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.778262] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f3bae8-3a7d-4d7a-bb71-5bcedc33f617 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.789183] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 718.789183] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52025c96-b58f-1e33-b94f-6629fcec1afe" [ 718.789183] env[69982]: _type = "Task" [ 718.789183] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.799700] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52025c96-b58f-1e33-b94f-6629fcec1afe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.806937] env[69982]: DEBUG nova.compute.manager [req-7c8c7048-3e49-417c-80bf-dde8777af677 req-fe16fca1-a6b8-484f-acae-603e51eece1c service nova] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Received event network-vif-deleted-ef327e31-93cb-44af-9ebb-5a46cd13075f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.888149] env[69982]: DEBUG nova.network.neutron [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Updating instance_info_cache with network_info: [{"id": "b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c", "address": "fa:16:3e:34:a1:de", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4fb0d2a-f1", "ovs_interfaceid": "b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.038700] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.039313] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 719.042355] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.343s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.178736] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f014226e-d2e3-4a54-a6d5-4c7b7e34cff8 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.382s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.241816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.253009] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864233, 'name': ReconfigVM_Task, 'duration_secs': 0.164971} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.253472] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767837', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'name': 'volume-7296ca0e-ce02-4177-9218-289c56e21e9a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f59d53-c61b-48ad-b55d-710595e9dae3', 'attached_at': '', 'detached_at': '', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'serial': '7296ca0e-ce02-4177-9218-289c56e21e9a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 719.254059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff980560-67b8-4add-a34a-ce4ab91eae44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.261373] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 719.261373] env[69982]: value = "task-3864235" [ 719.261373] env[69982]: _type = "Task" [ 719.261373] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.272014] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864235, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.301216] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52025c96-b58f-1e33-b94f-6629fcec1afe, 'name': SearchDatastore_Task, 'duration_secs': 0.012951} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.302152] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98118b35-61c6-4e86-8beb-af81b81a2c0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.308726] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 719.308726] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5206b97b-993e-aec0-8f3c-a9ee3fa484ee" [ 719.308726] env[69982]: _type = "Task" [ 719.308726] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.318092] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5206b97b-993e-aec0-8f3c-a9ee3fa484ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.391359] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-f9124657-d1c5-4a93-9d4a-3b06ca60ec63" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.391727] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance network_info: |[{"id": "b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c", "address": "fa:16:3e:34:a1:de", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4fb0d2a-f1", "ovs_interfaceid": "b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 719.392189] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:a1:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 719.406508] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating folder: Project (83b53a0998874810b5302415624592cd). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.406840] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9eae7231-a5ad-4f70-ad12-742a171f4fe1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.421444] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created folder: Project (83b53a0998874810b5302415624592cd) in parent group-v767796. [ 719.421444] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating folder: Instances. Parent ref: group-v767870. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 719.421748] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3a4a0e23-2afd-49bf-ba41-a40be50b2f12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.432969] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created folder: Instances in parent group-v767870. [ 719.433352] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 719.433477] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 719.433747] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c58a13de-33ff-4c8b-927b-96f70c8e28a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.459456] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 719.459456] env[69982]: value = "task-3864238" [ 719.459456] env[69982]: _type = "Task" [ 719.459456] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.471942] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864238, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.546704] env[69982]: DEBUG nova.compute.utils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 719.551326] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 719.551519] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 719.625037] env[69982]: DEBUG nova.policy [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ae2287e124b435bb8814032c83719e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75340e5596304ad38d40e450a5425cba', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 719.683514] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.778711] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864235, 'name': Rename_Task, 'duration_secs': 0.217883} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.778979] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.779259] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c15a5cc7-e2d7-467b-b4e4-a15cc8fe4b46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.792564] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 719.792564] env[69982]: value = "task-3864239" [ 719.792564] env[69982]: _type = "Task" [ 719.792564] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.803473] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864239, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.810439] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 719.815335] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded035d1-d839-4017-986e-4a476c8b9e03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.827936] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 719.828253] env[69982]: ERROR oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk due to incomplete transfer. 
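The rw_handles entries above capture the tail end of a stream-optimized image export: the HTTP NFC lease is still in the "ready" state when the read handle is closed, and because the stream was not read to completion the handle aborts the lease rather than completing it. A hedged sketch of that cleanup decision using oslo_vmware's generic invoke_api helper; session and lease are placeholders here.

# Sketch of the lease-cleanup decision; `session` and `lease` are placeholders.
from oslo_vmware import vim_util

def finish_lease(session, lease, transfer_complete):
    state = session.invoke_api(vim_util, 'get_object_property',
                               session.vim, lease, 'state')
    if state == 'ready':
        if transfer_complete:
            session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)
        else:
            # Mirrors the "Aborting lease ... due to incomplete transfer" error above.
            session.invoke_api(session.vim, 'HttpNfcLeaseAbort', lease)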
[ 719.833285] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b39a3948-be0c-46ee-a05b-97b9c501f363 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.835934] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5206b97b-993e-aec0-8f3c-a9ee3fa484ee, 'name': SearchDatastore_Task, 'duration_secs': 0.027298} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.840521] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.840756] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. {{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 719.841692] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a31a6af-131b-4b76-ad17-29360a57ff4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.849019] env[69982]: DEBUG oslo_vmware.rw_handles [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528f3312-5e0c-44e9-d236-7e94a3d85203/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 719.849263] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Uploaded image c05a1212-9935-4ef6-b538-e562efbce525 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 719.852152] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 719.854340] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bfb1a161-9941-4726-bd5f-c140de697a53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.856300] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 719.856300] env[69982]: value = "task-3864240" [ 719.856300] env[69982]: _type = "Task" [ 719.856300] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.865803] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 719.865803] env[69982]: value = "task-3864241" [ 719.865803] env[69982]: _type = "Task" [ 719.865803] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.873924] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864240, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.879892] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864241, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.926670] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Successfully updated port: ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 719.975538] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864238, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.055026] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 720.071374] env[69982]: DEBUG nova.compute.manager [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Received event network-vif-plugged-ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.071634] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] Acquiring lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.072430] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.072677] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.072872] env[69982]: DEBUG nova.compute.manager [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] No waiting events found dispatching network-vif-plugged-ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 720.073157] env[69982]: WARNING nova.compute.manager [req-5ba1f92c-9306-432f-bf8c-271ed19ee8ed req-08630c27-c5d4-4bec-9509-f0dff8634efe service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Received unexpected event network-vif-plugged-ad716338-99c7-49f2-a530-97e342fab644 for instance with vm_state building and task_state spawning. 
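The req-5ba1f92c entries show how the compute manager serializes externally delivered Neutron events: it takes a per-instance "<uuid>-events" lock, looks for a registered waiter, and logs the event as unexpected when nothing is waiting yet (here the instance is still building). A rough stand-in for that bookkeeping follows; the dict layout and function names are illustrative, not Nova's actual ones — only the lock-name convention and the "no waiter, treat as unexpected" behaviour are taken from the log.

from oslo_concurrency import lockutils

_pending_events = {}   # instance_uuid -> {event_name: waiter}

def pop_instance_event(instance_uuid, event_name):
    # Same "<uuid>-events" lock-name convention as the lockutils lines above.
    with lockutils.lock(instance_uuid + '-events'):
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)

def handle_external_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Nothing registered for this event yet -> "Received unexpected event ...".
        print('unexpected event %s for instance %s' % (event_name, instance_uuid))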
[ 720.176272] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e2fae0-926c-4cbf-af99-78c2d93d30e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.184687] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea224a1-0d1c-49e6-9ef0-d5fd8869a0cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.226459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.227403] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630fdbd3-4765-46bf-bd34-2a4694008dba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.238053] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-758288c5-cb09-4e8a-8722-085ce4a56aa2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.255550] env[69982]: DEBUG nova.compute.provider_tree [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.307683] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864239, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.370797] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864240, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.385169] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864241, 'name': Destroy_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.429064] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.429472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquired lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.429924] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 720.457683] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Successfully created port: aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.474644] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864238, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.758668] env[69982]: DEBUG nova.scheduler.client.report [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.805837] env[69982]: DEBUG oslo_vmware.api [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864239, 'name': PowerOnVM_Task, 'duration_secs': 0.701396} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.806035] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 720.806245] env[69982]: INFO nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Took 7.20 seconds to spawn the instance on the hypervisor. [ 720.806723] env[69982]: DEBUG nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.807491] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd484044-1156-4ef3-b31e-72ea55854686 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.870098] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864240, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723837} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.871211] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. [ 720.875221] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb078a7d-5477-487f-b0ea-28260b07752f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.884990] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864241, 'name': Destroy_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.912242] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.912910] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10d94923-76df-44be-9816-f011f7009e75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.937289] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 720.937289] env[69982]: value = "task-3864242" [ 720.937289] env[69982]: _type = "Task" [ 720.937289] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.949886] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864242, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.975456] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864238, 'name': CreateVM_Task, 'duration_secs': 1.430498} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.975456] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 720.975456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.975456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.975456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 720.975706] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-521e34b7-8fe9-4701-89c8-6407c6742127 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.983364] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 720.983364] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd616-5595-7975-7d6a-08a4abc770be" [ 720.983364] env[69982]: _type = "Task" [ 720.983364] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.993647] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd616-5595-7975-7d6a-08a4abc770be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.018528] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 721.067444] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 721.100999] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 721.101274] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.101407] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 721.101592] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.101738] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 721.102218] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 721.102512] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 721.102903] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc 
tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 721.102903] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 721.103020] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 721.103201] env[69982]: DEBUG nova.virt.hardware [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 721.104156] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246c6747-4f68-4cce-8c6d-ec583f71af6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.113378] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f31d06-ad23-4c15-8831-cbb256da9838 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.341566] env[69982]: INFO nova.compute.manager [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Took 33.51 seconds to build instance. 
[ 721.358852] env[69982]: DEBUG nova.network.neutron [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updating instance_info_cache with network_info: [{"id": "ad716338-99c7-49f2-a530-97e342fab644", "address": "fa:16:3e:5a:b3:6d", "network": {"id": "817d38e9-0594-4e2f-8145-2d18050b1e49", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-184903772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ce3af012c140a08fb2b992dacd15aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad716338-99", "ovs_interfaceid": "ad716338-99c7-49f2-a530-97e342fab644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.383999] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864241, 'name': Destroy_Task, 'duration_secs': 1.137466} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.385074] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Destroyed the VM [ 721.385074] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 721.385074] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9b98cc4c-afc7-4657-b2a2-1b26250730dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.392705] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 721.392705] env[69982]: value = "task-3864243" [ 721.392705] env[69982]: _type = "Task" [ 721.392705] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.404649] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864243, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.448980] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864242, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.500016] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd616-5595-7975-7d6a-08a4abc770be, 'name': SearchDatastore_Task, 'duration_secs': 0.044991} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.500610] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.501123] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 721.502032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.502032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.502032] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 721.502218] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37355812-c3e1-408c-9f05-a43f89dd3e9c {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.520359] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 721.520437] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 721.525861] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf4805ec-79cd-4903-ae67-0b1cbe9d607b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.529557] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 721.529557] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5222c743-8db7-f304-1f99-14b332a92a54" [ 721.529557] env[69982]: _type = "Task" [ 721.529557] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.539717] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5222c743-8db7-f304-1f99-14b332a92a54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.777976] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.735s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.784132] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.297s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.784132] env[69982]: DEBUG nova.objects.instance [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lazy-loading 'resources' on Instance uuid 049c7405-3daf-4064-8770-efbbf15c832e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.844242] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5a5dd24c-f242-471b-b443-7412f3eaa019 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.209s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.861425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Releasing lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 721.861749] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Instance network_info: |[{"id": "ad716338-99c7-49f2-a530-97e342fab644", "address": "fa:16:3e:5a:b3:6d", "network": {"id": "817d38e9-0594-4e2f-8145-2d18050b1e49", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-184903772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ce3af012c140a08fb2b992dacd15aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad716338-99", "ovs_interfaceid": "ad716338-99c7-49f2-a530-97e342fab644", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 721.862176] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:b3:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad716338-99c7-49f2-a530-97e342fab644', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 721.869959] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Creating folder: Project (60ce3af012c140a08fb2b992dacd15aa). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 721.870406] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32972978-3f22-4ef0-8fc9-f0c5a325fb45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.882052] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Created folder: Project (60ce3af012c140a08fb2b992dacd15aa) in parent group-v767796. [ 721.882274] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Creating folder: Instances. Parent ref: group-v767873. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 721.882520] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4fc42f2-335f-4901-8f9f-1bae07332589 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.893132] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Created folder: Instances in parent group-v767873. [ 721.893132] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.893132] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 721.893132] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52580eec-d76f-4559-a9bc-b93ec54a5d4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.920912] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864243, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.923270] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 721.923270] env[69982]: value = "task-3864246" [ 721.923270] env[69982]: _type = "Task" [ 721.923270] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.935701] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864246, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.951717] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864242, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.971686] env[69982]: DEBUG nova.compute.manager [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.971686] env[69982]: DEBUG nova.compute.manager [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing instance network info cache due to event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 721.971686] env[69982]: DEBUG oslo_concurrency.lockutils [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] Acquiring lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.971686] env[69982]: DEBUG oslo_concurrency.lockutils [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] Acquired lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.971897] env[69982]: DEBUG nova.network.neutron [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 722.045921] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5222c743-8db7-f304-1f99-14b332a92a54, 'name': SearchDatastore_Task, 'duration_secs': 0.023768} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.047158] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4874a015-a0f5-4947-9bb6-2e13fd211e49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.055828] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 722.055828] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5227d72f-9701-a341-13e5-b76603aa91a5" [ 722.055828] env[69982]: _type = "Task" [ 722.055828] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.066476] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5227d72f-9701-a341-13e5-b76603aa91a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.347513] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 722.362380] env[69982]: INFO nova.scheduler.client.report [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleted allocation for migration 65e21314-0928-4022-97e3-0013ad63e1cd [ 722.412788] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864243, 'name': RemoveSnapshot_Task} progress is 58%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.434917] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864246, 'name': CreateVM_Task, 'duration_secs': 0.503401} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.437685] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.438570] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.439555] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.439555] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 722.439555] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0921c80-3ef4-4106-b71e-096857ddaf7a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.449357] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 722.449357] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b81ec0-3edd-cbcd-185e-dc9fd735b9e4" [ 722.449357] env[69982]: _type = "Task" [ 722.449357] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.453774] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864242, 'name': ReconfigVM_Task, 'duration_secs': 1.157548} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.458395] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Reconfigured VM instance instance-00000016 to attach disk [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 722.462042] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c9fc13-a23a-4a33-97f3-2e6662120020 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.472426] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b81ec0-3edd-cbcd-185e-dc9fd735b9e4, 'name': SearchDatastore_Task, 'duration_secs': 0.010933} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.488351] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.488628] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 722.488839] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.498807] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2c77c4a-9b3a-4f5c-a459-5f7bba9ab294 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.518347] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 722.518347] env[69982]: value = "task-3864247" [ 722.518347] env[69982]: _type = "Task" [ 722.518347] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.528971] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864247, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.532023] env[69982]: DEBUG nova.compute.manager [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Received event network-changed-ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 722.532023] env[69982]: DEBUG nova.compute.manager [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Refreshing instance network info cache due to event network-changed-ad716338-99c7-49f2-a530-97e342fab644. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 722.532023] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] Acquiring lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.532259] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] Acquired lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.532259] env[69982]: DEBUG nova.network.neutron [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Refreshing network info cache for port ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 722.571777] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5227d72f-9701-a341-13e5-b76603aa91a5, 'name': SearchDatastore_Task, 'duration_secs': 0.043202} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.572112] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.572922] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 722.572922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.572922] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 722.573221] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d278ed7-5210-4a2b-b4ae-268db41ace4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.575372] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-444abdd0-67ae-4482-a887-b0d0460924b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.586830] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 722.586830] env[69982]: value = "task-3864248" [ 722.586830] env[69982]: _type = "Task" [ 722.586830] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.593263] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 722.593802] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 722.594364] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-343aac3b-0f9b-43dc-8dc5-0675204b3b1c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.606445] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 722.606445] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f6100-834d-ad01-2d70-8174746dac95" [ 722.606445] env[69982]: _type = "Task" [ 722.606445] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.607261] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864248, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.622208] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f6100-834d-ad01-2d70-8174746dac95, 'name': SearchDatastore_Task, 'duration_secs': 0.010648} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.623025] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9267eaf3-0317-4584-9b5c-838e0d154120 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.632202] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 722.632202] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b04a-aeff-eb2a-8f96-0e17b1feb79b" [ 722.632202] env[69982]: _type = "Task" [ 722.632202] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.641547] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b04a-aeff-eb2a-8f96-0e17b1feb79b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.874014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e492c30-fd69-49fa-ac6c-df2d88d1f744 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.713s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.876384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.913773] env[69982]: DEBUG oslo_vmware.api [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864243, 'name': RemoveSnapshot_Task, 'duration_secs': 1.167182} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.914131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 722.914292] env[69982]: INFO nova.compute.manager [None req-0e286a3e-5915-4c31-b6a7-70a9b47ba038 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 18.02 seconds to snapshot the instance on the hypervisor. [ 722.932135] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2354e054-ad3f-49ae-a066-d548b1b132c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.943034] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4ce706-427b-4344-a5f0-c5d81876b626 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.954668] env[69982]: DEBUG nova.network.neutron [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updated VIF entry in instance network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 722.954668] env[69982]: DEBUG nova.network.neutron [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [{"id": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "address": "fa:16:3e:7d:30:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8d3aec-23", "ovs_interfaceid": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.990464] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7098c6e1-69cf-49b4-8d86-e03f019ca4d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.000620] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0e1ed2-a1bc-46a4-97dc-4dd3199608db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.020947] env[69982]: DEBUG nova.compute.provider_tree [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.034644] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864247, 'name': ReconfigVM_Task, 'duration_secs': 0.213268} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.034986] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.035335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96732d78-0560-44ee-9ce0-7beb0c7e57dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.048972] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 723.048972] env[69982]: value = "task-3864249" [ 723.048972] env[69982]: _type = "Task" [ 723.048972] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.067857] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864249, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.101601] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864248, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.148346] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200b04a-aeff-eb2a-8f96-0e17b1feb79b, 'name': SearchDatastore_Task, 'duration_secs': 0.010107} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.148632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.151979] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5743a020-0c09-45ec-aca4-5ce367cc201a/5743a020-0c09-45ec-aca4-5ce367cc201a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 723.151979] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b692e03b-2531-4438-b565-c49cc3c742ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.161723] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 723.161723] env[69982]: value = "task-3864250" [ 723.161723] env[69982]: _type = "Task" [ 723.161723] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.179082] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864250, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.262584] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Successfully updated port: aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 723.458399] env[69982]: DEBUG oslo_concurrency.lockutils [req-cd5dfa32-0093-4d64-8f95-01d726b2fadf req-92a2b395-ca46-4ea4-95f5-400bca2fc9b8 service nova] Releasing lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.500950] env[69982]: DEBUG nova.objects.instance [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lazy-loading 'flavor' on Instance uuid 743a4a52-ce35-4ec1-9286-e0c470e87186 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 723.529999] env[69982]: DEBUG nova.scheduler.client.report [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.569296] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864249, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.600048] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864248, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73746} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.600391] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 723.600594] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 723.600882] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfae1c4c-6786-4c79-bd71-ee6b88fa8888 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.611501] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 723.611501] env[69982]: value = "task-3864251" [ 723.611501] env[69982]: _type = "Task" [ 723.611501] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.622474] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864251, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.677373] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864250, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.770198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.770353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquired lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.770566] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 723.811549] env[69982]: DEBUG nova.network.neutron [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updated VIF entry in instance network info cache for port ad716338-99c7-49f2-a530-97e342fab644. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 723.812775] env[69982]: DEBUG nova.network.neutron [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updating instance_info_cache with network_info: [{"id": "ad716338-99c7-49f2-a530-97e342fab644", "address": "fa:16:3e:5a:b3:6d", "network": {"id": "817d38e9-0594-4e2f-8145-2d18050b1e49", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-184903772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ce3af012c140a08fb2b992dacd15aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad716338-99", "ovs_interfaceid": "ad716338-99c7-49f2-a530-97e342fab644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.010527] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.010840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.043208] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.260s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.045662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.715s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.047720] env[69982]: INFO nova.compute.claims [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 724.067236] env[69982]: DEBUG oslo_vmware.api [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864249, 'name': PowerOnVM_Task, 'duration_secs': 0.59332} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.068433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.073368] env[69982]: DEBUG nova.compute.manager [None req-7221c975-8b2f-4f53-8940-68809fc403b7 tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.074740] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020a5f0d-baf6-4b5a-8c93-5ea5d499a478 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.094622] env[69982]: INFO nova.scheduler.client.report [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Deleted allocations for instance 049c7405-3daf-4064-8770-efbbf15c832e [ 724.123626] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.220204} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.123930] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.125521] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e2748a5-e9ce-4373-80f5-fb2041adbdc9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.156402] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.158801] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-928fe031-181a-415c-aa2f-c8a19eb848f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.185480] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864250, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666375} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.186899] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5743a020-0c09-45ec-aca4-5ce367cc201a/5743a020-0c09-45ec-aca4-5ce367cc201a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 724.187142] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 724.187697] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 724.187697] env[69982]: value = "task-3864252" [ 724.187697] env[69982]: _type = "Task" [ 724.187697] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.187887] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41611dba-b060-434b-89e4-51413bfa7c6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.198799] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864252, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.200386] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 724.200386] env[69982]: value = "task-3864253" [ 724.200386] env[69982]: _type = "Task" [ 724.200386] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.318828] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e629b54-7be2-480b-8749-8e8915fc82cb req-6ee75891-9d73-4938-af55-171b5331dd07 service nova] Releasing lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.384540] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 724.562832] env[69982]: DEBUG nova.compute.manager [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received event network-vif-plugged-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.563191] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Acquiring lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.563719] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.564062] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.565404] env[69982]: DEBUG nova.compute.manager [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] No waiting events found dispatching network-vif-plugged-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 724.565404] env[69982]: WARNING nova.compute.manager [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received unexpected event network-vif-plugged-aead0d1e-bc05-4064-8494-624226340060 for instance with vm_state building and task_state spawning. [ 724.565404] env[69982]: DEBUG nova.compute.manager [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received event network-changed-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.565404] env[69982]: DEBUG nova.compute.manager [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing instance network info cache due to event network-changed-aead0d1e-bc05-4064-8494-624226340060. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.565971] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Acquiring lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.606207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-914da4de-688f-478e-b0ce-590ee4b9d43f tempest-ServerAddressesNegativeTestJSON-411123896 tempest-ServerAddressesNegativeTestJSON-411123896-project-member] Lock "049c7405-3daf-4064-8770-efbbf15c832e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.845s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.695922] env[69982]: DEBUG nova.network.neutron [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [{"id": "aead0d1e-bc05-4064-8494-624226340060", "address": "fa:16:3e:3c:80:7e", "network": {"id": "d085c3c7-14e1-4907-8ba4-c86bdcc6d4d9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-258973916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "75340e5596304ad38d40e450a5425cba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563", "external-id": "nsx-vlan-transportzone-931", "segmentation_id": 931, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaead0d1e-bc", "ovs_interfaceid": "aead0d1e-bc05-4064-8494-624226340060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.702056] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.716031] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864253, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.401597} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.716031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 724.716031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5e1aa2-514e-4207-b8d6-8ea98d6dc276 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.742091] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 5743a020-0c09-45ec-aca4-5ce367cc201a/5743a020-0c09-45ec-aca4-5ce367cc201a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 724.742450] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a4e8758-9019-4ade-b66a-934c531a5120 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.764665] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 724.764665] env[69982]: value = "task-3864254" [ 724.764665] env[69982]: _type = "Task" [ 724.764665] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.777099] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864254, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.101503] env[69982]: DEBUG nova.network.neutron [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 725.204669] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Releasing lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 725.205174] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Instance network_info: |[{"id": "aead0d1e-bc05-4064-8494-624226340060", "address": "fa:16:3e:3c:80:7e", "network": {"id": "d085c3c7-14e1-4907-8ba4-c86bdcc6d4d9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-258973916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "75340e5596304ad38d40e450a5425cba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563", "external-id": "nsx-vlan-transportzone-931", "segmentation_id": 931, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaead0d1e-bc", "ovs_interfaceid": "aead0d1e-bc05-4064-8494-624226340060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 725.205787] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864252, 'name': ReconfigVM_Task, 'duration_secs': 0.975109} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.206173] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Acquired lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.206439] env[69982]: DEBUG nova.network.neutron [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing network info cache for port aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 725.207807] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:80:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aead0d1e-bc05-4064-8494-624226340060', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 725.220366] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Creating folder: Project (75340e5596304ad38d40e450a5425cba). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.220694] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Reconfigured VM instance instance-00000019 to attach disk [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.226838] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2c64a708-fd7d-4756-b022-a6e1a5caa2c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.229472] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9eaefa9-10b1-4141-9cc4-503bb8359316 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.240300] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 725.240300] env[69982]: value = "task-3864256" [ 725.240300] env[69982]: _type = "Task" [ 725.240300] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.246299] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Created folder: Project (75340e5596304ad38d40e450a5425cba) in parent group-v767796. [ 725.246533] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Creating folder: Instances. Parent ref: group-v767876. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 725.250041] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67a9d380-8a37-4045-bfd3-92f289c7374c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.256364] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864256, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.268225] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Created folder: Instances in parent group-v767876. [ 725.268673] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 725.277908] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 725.278645] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1e62981-5b4c-47df-8bba-a881960aa95a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.305080] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864254, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.306484] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 725.306484] env[69982]: value = "task-3864258" [ 725.306484] env[69982]: _type = "Task" [ 725.306484] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.316685] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864258, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.449823] env[69982]: DEBUG nova.compute.manager [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Received event network-changed-c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 725.449823] env[69982]: DEBUG nova.compute.manager [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Refreshing instance network info cache due to event network-changed-c81278f4-6f59-40a5-aada-68e9141c3415. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 725.449823] env[69982]: DEBUG oslo_concurrency.lockutils [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] Acquiring lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.449823] env[69982]: DEBUG oslo_concurrency.lockutils [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] Acquired lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.449823] env[69982]: DEBUG nova.network.neutron [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Refreshing network info cache for port c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 725.683208] env[69982]: DEBUG nova.network.neutron [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updated VIF entry in instance network info cache for port aead0d1e-bc05-4064-8494-624226340060. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 725.683208] env[69982]: DEBUG nova.network.neutron [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [{"id": "aead0d1e-bc05-4064-8494-624226340060", "address": "fa:16:3e:3c:80:7e", "network": {"id": "d085c3c7-14e1-4907-8ba4-c86bdcc6d4d9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-258973916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "75340e5596304ad38d40e450a5425cba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563", "external-id": "nsx-vlan-transportzone-931", "segmentation_id": 931, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaead0d1e-bc", "ovs_interfaceid": "aead0d1e-bc05-4064-8494-624226340060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.696746] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0d86ed-a43d-4f9c-892c-59031770a80d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.707720] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1ec33f-5e36-437a-9ff8-72f319a0ab45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.748849] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb126278-758d-44e9-b6f5-122604638a6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.762350] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631f8e22-96c9-498d-8725-9f633879c3fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.766815] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864256, 'name': Rename_Task, 'duration_secs': 0.16394} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.769572] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 725.770286] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff56f65b-1b5f-40b8-bafc-fc0c1baedd7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.785484] env[69982]: DEBUG nova.compute.provider_tree [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.793402] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864254, 'name': ReconfigVM_Task, 'duration_secs': 0.681507} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.794995] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 5743a020-0c09-45ec-aca4-5ce367cc201a/5743a020-0c09-45ec-aca4-5ce367cc201a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 725.795486] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 725.795486] env[69982]: value = "task-3864259" [ 725.795486] env[69982]: _type = "Task" [ 725.795486] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.795667] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39d06ab3-be04-4037-8cbb-689ca0796f25 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.815370] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.816268] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 725.816268] env[69982]: value = "task-3864260" [ 725.816268] env[69982]: _type = "Task" [ 725.816268] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.825648] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864258, 'name': CreateVM_Task, 'duration_secs': 0.330007} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.830405] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 725.830405] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864260, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.830802] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.830965] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 725.831332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 725.831609] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a062df5d-6f6d-4abd-bf25-46cad326f878 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.837681] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 725.837681] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52558fab-5544-759c-d4ca-e5169e6209e9" [ 725.837681] env[69982]: _type = "Task" [ 725.837681] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.849047] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52558fab-5544-759c-d4ca-e5169e6209e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.161609] env[69982]: DEBUG nova.network.neutron [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.183954] env[69982]: DEBUG oslo_concurrency.lockutils [req-d7114592-9134-4b8a-acf9-95d3561c8e8c req-9a04f1cc-6aa5-4adc-a4a5-032fa59e0b6f service nova] Releasing lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.290017] env[69982]: DEBUG nova.scheduler.client.report [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.308574] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864259, 'name': PowerOnVM_Task} progress is 98%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.333497] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864260, 'name': Rename_Task, 'duration_secs': 0.211001} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.335132] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 726.335132] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4442483-d579-4b0b-982b-122bf1e66717 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.343838] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 726.343838] env[69982]: value = "task-3864261" [ 726.343838] env[69982]: _type = "Task" [ 726.343838] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.352315] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52558fab-5544-759c-d4ca-e5169e6209e9, 'name': SearchDatastore_Task, 'duration_secs': 0.025662} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.353130] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.353408] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 726.353664] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.353807] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.353988] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 726.354377] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e00f2cdc-e825-4490-b212-b99c1aa37104 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.360646] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864261, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.374130] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 726.374130] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 726.374130] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e19cd45f-0cfd-443d-beda-9349708df9ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.379759] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 726.379759] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c04caa-c657-b238-f15e-a4272a42df14" [ 726.379759] env[69982]: _type = "Task" [ 726.379759] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.391017] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c04caa-c657-b238-f15e-a4272a42df14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.469637] env[69982]: DEBUG nova.network.neutron [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updated VIF entry in instance network info cache for port c81278f4-6f59-40a5-aada-68e9141c3415. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 726.471826] env[69982]: DEBUG nova.network.neutron [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updating instance_info_cache with network_info: [{"id": "c81278f4-6f59-40a5-aada-68e9141c3415", "address": "fa:16:3e:cd:b0:79", "network": {"id": "390ad4c7-41f1-4907-8f9c-355e8f47fc4e", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1913038845-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "db52cf2370234a5296423eceeaf74485", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc81278f4-6f", "ovs_interfaceid": "c81278f4-6f59-40a5-aada-68e9141c3415", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.599061] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "7af5a14d-f586-4746-9831-8be255581637" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.599903] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.667991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.668421] env[69982]: DEBUG nova.compute.manager [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Inject network info {{(pid=69982) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 726.668525] env[69982]: DEBUG nova.compute.manager [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 
tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] network_info to inject: |[{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 726.678759] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfiguring VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 726.679416] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c53d9c8-5022-4acf-beea-491c95cdb55d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.700169] env[69982]: DEBUG oslo_vmware.api [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 726.700169] env[69982]: value = "task-3864262" [ 726.700169] env[69982]: _type = "Task" [ 726.700169] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.702807] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.703077] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.704030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.704030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.704030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.707657] env[69982]: INFO nova.compute.manager [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Terminating instance [ 726.714292] env[69982]: DEBUG oslo_vmware.api [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864262, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.795640] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.750s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.796180] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.799366] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.060s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.799632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.799745] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 726.800126] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.203s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.802232] env[69982]: INFO nova.compute.claims [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 726.804940] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4488132-58bb-4f99-8331-66e5dc9fb068 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.818981] env[69982]: DEBUG oslo_vmware.api [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864259, 'name': PowerOnVM_Task, 'duration_secs': 0.712936} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.819343] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 726.820065] env[69982]: INFO nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Took 11.80 seconds to spawn the instance on the hypervisor. [ 726.820065] env[69982]: DEBUG nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 726.821139] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1671100d-e9df-41fb-8a9c-0bdfcb423c0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.825706] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254994d8-bb03-4077-8551-744a0ad0ac7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.842242] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9315109f-0c55-4bfb-85e9-8e8c8c587a10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.860884] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864261, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.864393] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef65368-fe80-46c3-9bdc-eea4a52b0d06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.901764] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179365MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 726.901912] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.912702] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c04caa-c657-b238-f15e-a4272a42df14, 'name': SearchDatastore_Task, 'duration_secs': 0.013017} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.913542] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e730ecf-8bba-4e53-98ed-b03c6d43dc4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.919576] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 726.919576] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e142d-e929-6fb6-9f1e-001184378d71" [ 726.919576] env[69982]: _type = "Task" [ 726.919576] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.929126] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e142d-e929-6fb6-9f1e-001184378d71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.979140] env[69982]: DEBUG oslo_concurrency.lockutils [req-74f2cdbb-2dca-40bf-882e-86f6d5b33389 req-19ac326d-0bd2-4e1f-af27-27687acd58b8 service nova] Releasing lock "refresh_cache-67f59d53-c61b-48ad-b55d-710595e9dae3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.210983] env[69982]: DEBUG oslo_vmware.api [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864262, 'name': ReconfigVM_Task, 'duration_secs': 0.183109} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.214440] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cd176fa1-dc4d-4ff6-b969-923b0ec82599 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfigured VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 727.217315] env[69982]: DEBUG nova.compute.manager [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 727.219347] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 727.220379] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268e28ca-d866-4133-b6df-ebeb095f8e38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.233497] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 727.233918] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e10bed5-9237-42c5-bcc3-5022d5a800d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.243270] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 727.243270] env[69982]: value = "task-3864263" [ 727.243270] env[69982]: _type = "Task" [ 727.243270] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.252945] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.303090] env[69982]: DEBUG nova.compute.utils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.303608] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 727.303608] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.359249] env[69982]: DEBUG nova.policy [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.366813] env[69982]: INFO nova.compute.manager [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Took 37.39 seconds to build instance. [ 727.373803] env[69982]: DEBUG oslo_vmware.api [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864261, 'name': PowerOnVM_Task, 'duration_secs': 0.763208} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.374100] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 727.374310] env[69982]: INFO nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Took 9.07 seconds to spawn the instance on the hypervisor. 
[ 727.374577] env[69982]: DEBUG nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 727.375768] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56d4ce3-a1bc-438e-8b8a-c93c8370df0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.432975] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e142d-e929-6fb6-9f1e-001184378d71, 'name': SearchDatastore_Task, 'duration_secs': 0.021758} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.433696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.433696] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5ba60eb7-ee6f-47e2-b6ca-b54817dab371/5ba60eb7-ee6f-47e2-b6ca-b54817dab371.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 727.433867] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afd80ddc-e5cf-4174-bfce-0d0659b5fdea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.442936] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 727.442936] env[69982]: value = "task-3864264" [ 727.442936] env[69982]: _type = "Task" [ 727.442936] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.453867] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864264, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.514472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.515182] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.515182] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.515332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.515554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.520242] env[69982]: INFO nova.compute.manager [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Terminating instance [ 727.756022] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864263, 'name': PowerOffVM_Task, 'duration_secs': 0.363852} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.756399] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 727.756608] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 727.756894] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e17d83f-b23d-4643-959c-05efd20e846a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.782292] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Successfully created port: 061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 727.808153] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 727.869794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-76c081f3-8da5-4c57-82e4-b3a5effbfcc6 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.899s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.870494] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 727.870767] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 727.870907] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleting the datastore file [datastore1] 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 727.875711] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-589c5af0-07f8-4319-9586-35aef385d0ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.898678] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 727.898678] env[69982]: value = "task-3864266" [ 727.898678] env[69982]: _type = "Task" [ 727.898678] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.907686] env[69982]: INFO nova.compute.manager [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Took 34.20 seconds to build instance. [ 727.918711] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864266, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.962140] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864264, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.023744] env[69982]: DEBUG nova.compute.manager [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 728.024009] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 728.026311] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16999932-f23f-4b03-8f28-0c9bc822f147 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.041428] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 728.041837] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4bb49048-8271-4bb1-9df0-c154428d2ea1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.051956] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 728.051956] env[69982]: value = "task-3864267" [ 728.051956] env[69982]: _type = "Task" [ 728.051956] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.068179] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.170652] env[69982]: DEBUG nova.compute.manager [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.170976] env[69982]: DEBUG nova.compute.manager [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing instance network info cache due to event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 728.172292] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.172292] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.172292] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 728.323101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.323781] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.353580] env[69982]: DEBUG nova.compute.manager [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.354343] env[69982]: DEBUG nova.compute.manager [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing instance network info cache due to event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 728.355295] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.355625] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.356330] env[69982]: DEBUG nova.network.neutron [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 728.384336] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.411052] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8e508b8-1ccc-4227-a871-d7e117821008 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.719s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.416629] env[69982]: DEBUG oslo_vmware.api [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864266, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386172} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.417555] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 728.417762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 728.417943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 728.418357] env[69982]: INFO nova.compute.manager [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 728.418887] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 728.423952] env[69982]: DEBUG nova.compute.manager [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 728.423952] env[69982]: DEBUG nova.network.neutron [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 728.455036] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864264, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553133} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.456406] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5ba60eb7-ee6f-47e2-b6ca-b54817dab371/5ba60eb7-ee6f-47e2-b6ca-b54817dab371.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 728.456648] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 728.457453] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2265e5b3-f42c-49d5-ad03-2ea99597017a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.460169] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbfdcfb4-bbeb-4ba3-b9fb-d067dfe53310 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.470209] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469ebff4-b4fb-4ca3-8786-d8f84a64e548 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.472295] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 728.472295] env[69982]: value = "task-3864268" [ 728.472295] env[69982]: _type = "Task" [ 728.472295] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.504282] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6a7794-28f8-480d-a229-becf819270d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.511335] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864268, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.518984] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7611acf-d1d9-46ab-a2cd-c882adbd25c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.538711] env[69982]: DEBUG nova.compute.provider_tree [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 728.568059] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864267, 'name': PowerOffVM_Task, 'duration_secs': 0.263861} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.568208] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 728.568389] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 728.568664] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecbcaa05-11a9-4d13-8608-50846025efbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.642384] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 728.642712] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 728.642977] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Deleting the datastore file [datastore1] 303c7ee1-8d26-460b-aab9-d55c71cf8a73 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 728.643376] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31ad145a-6538-4368-a094-000940282b43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
728.651503] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for the task: (returnval){ [ 728.651503] env[69982]: value = "task-3864270" [ 728.651503] env[69982]: _type = "Task" [ 728.651503] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.663644] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.829225] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.859803] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 728.860121] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 728.860286] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 728.860466] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 728.860615] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 728.860844] env[69982]: DEBUG 
nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 728.861093] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 728.861273] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 728.861483] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 728.861611] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 728.861822] env[69982]: DEBUG nova.virt.hardware [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 728.864791] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35562ec1-7dd1-4be9-9e48-f5660b1b0822 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.875288] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4087fabd-9320-4ea2-883a-19462e907d20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.918608] env[69982]: DEBUG nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 728.939286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.988403] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864268, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086438} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.989143] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 728.990521] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a1a243-bfa7-4846-9f5b-ad83e2e1a163 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.017969] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 5ba60eb7-ee6f-47e2-b6ca-b54817dab371/5ba60eb7-ee6f-47e2-b6ca-b54817dab371.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 729.020594] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb964e1-7083-4131-9b35-f0fc89692d87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.041466] env[69982]: DEBUG nova.scheduler.client.report [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 729.050162] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 729.050162] env[69982]: value = "task-3864271" [ 729.050162] env[69982]: _type = "Task" [ 
729.050162] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.057081] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.164825] env[69982]: DEBUG oslo_vmware.api [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Task: {'id': task-3864270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.417248} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.165173] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 729.165379] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 729.165559] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 729.165738] env[69982]: INFO nova.compute.manager [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 1.14 seconds to destroy the instance on the hypervisor. [ 729.165991] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.166194] env[69982]: DEBUG nova.compute.manager [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 729.166293] env[69982]: DEBUG nova.network.neutron [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 729.459363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.523893] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updated VIF entry in instance network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 729.524295] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.546756] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.547315] env[69982]: DEBUG nova.compute.manager [None 
req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 729.550106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.995s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.552068] env[69982]: INFO nova.compute.claims [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.555545] env[69982]: DEBUG nova.network.neutron [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updated VIF entry in instance network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 729.555877] env[69982]: DEBUG nova.network.neutron [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.568795] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864271, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.823220] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Successfully updated port: 061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 729.860992] env[69982]: DEBUG nova.network.neutron [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.006032] env[69982]: DEBUG nova.objects.instance [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lazy-loading 'flavor' on Instance uuid 743a4a52-ce35-4ec1-9286-e0c470e87186 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.026717] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.027061] env[69982]: DEBUG nova.compute.manager [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.027521] env[69982]: DEBUG nova.compute.manager [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing instance network info cache due to event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 730.027942] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.059922] env[69982]: DEBUG nova.compute.utils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.068568] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 730.068795] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.075630] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4799baf-abd7-42d0-b5bd-8c335ae224db req-8e4a42d3-1b5c-4482-8ea2-6fa5a53e16f5 service nova] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.076907] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.077739] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 730.090192] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864271, 'name': ReconfigVM_Task, 'duration_secs': 0.617862} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.092030] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 5ba60eb7-ee6f-47e2-b6ca-b54817dab371/5ba60eb7-ee6f-47e2-b6ca-b54817dab371.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 730.093036] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ca7729d6-c85f-42e3-833b-5159ee3a1e2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.103979] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 730.103979] env[69982]: value = "task-3864272" [ 730.103979] env[69982]: _type = "Task" [ 730.103979] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.115653] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864272, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.195586] env[69982]: DEBUG nova.policy [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fdf1d6b2c53540ddbbef7aadc30dd230', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1aa28abe6634d73af73c67ff6ef8635', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.328202] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.328202] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.328202] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 730.365976] env[69982]: INFO nova.compute.manager [-] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Took 1.94 seconds to deallocate network for instance. [ 730.444271] env[69982]: DEBUG nova.network.neutron [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.511700] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.512039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.577385] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 730.626025] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864272, 'name': Rename_Task, 'duration_secs': 0.357989} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.627251] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.627688] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c5fbf98-d4d1-4d8d-98ff-9811b04a7a11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.640774] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 730.640774] env[69982]: value = "task-3864273" [ 730.640774] env[69982]: _type = "Task" [ 730.640774] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.652669] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.736685] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Successfully created port: 5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.877863] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.917707] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 730.949898] env[69982]: INFO nova.compute.manager [-] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Took 1.78 seconds to deallocate network for instance. 
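The task-3864272/task-3864273 entries around this point show the oslo.vmware task-polling pattern: the driver kicks off Rename_Task and PowerOnVM_Task, then wait_for_task repeatedly reports "progress is N%." until a "completed successfully" entry with a duration_secs value appears. A minimal Python sketch of that polling loop follows; it is an approximation of the behaviour visible in these lines, not the oslo.vmware implementation, and the get_task_info() callable plus the TaskInfo shape it returns are hypothetical stand-ins.

    # Illustrative sketch only: mimics the wait_for_task/_poll_task behaviour
    # seen in the log. get_task_info() is a hypothetical callable returning an
    # object with .state ('queued'|'running'|'success'|'error'), .progress and
    # .error; it is not part of oslo.vmware.
    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it reaches a terminal state."""
        started = time.monotonic()
        while True:
            info = get_task_info()
            if info.state == "success":
                # Corresponds to the "... completed successfully." entries,
                # which also carry the elapsed time as duration_secs.
                return time.monotonic() - started
            if info.state == "error":
                raise TaskFailed(info.error or "task failed")
            # Corresponds to the periodic "progress is N%." entries.
            print(f"progress is {info.progress or 0}%")
            if time.monotonic() - started > timeout:
                raise TaskFailed("timed out waiting for task")
            time.sleep(poll_interval)

The log above is the production counterpart of this loop: Rename_Task completes after 0.357989s, PowerOnVM_Task reports 0% and 66% before completing after 0.838045s.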
[ 731.153795] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864273, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.170250] env[69982]: INFO nova.compute.manager [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Rebuilding instance [ 731.212439] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d193cc-44c2-45f0-b4a0-4c89395e7d84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.222275] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58b7526-8cad-40f2-bf05-e42adc241afb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.265972] env[69982]: DEBUG nova.network.neutron [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updating instance_info_cache with network_info: [{"id": "061b4fb2-6c43-46cc-87f5-b307884088b1", "address": "fa:16:3e:0c:75:da", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap061b4fb2-6c", "ovs_interfaceid": "061b4fb2-6c43-46cc-87f5-b307884088b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.266635] env[69982]: DEBUG nova.compute.manager [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.267554] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de98b9fa-400f-46f2-aa88-dcfd2ff2b559 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.270955] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b263180c-fe77-4422-927b-6bdcdf5f9d88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.289168] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f489c08-8ddb-4a82-9cd7-15064616423e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.309371] env[69982]: DEBUG nova.compute.provider_tree [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.315285] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updated VIF entry in instance network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 731.315285] env[69982]: DEBUG nova.network.neutron [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.316166] env[69982]: DEBUG nova.compute.manager [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Received event network-vif-plugged-061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 731.316446] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.316636] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e 
req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.316836] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.317127] env[69982]: DEBUG nova.compute.manager [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] No waiting events found dispatching network-vif-plugged-061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 731.317371] env[69982]: WARNING nova.compute.manager [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Received unexpected event network-vif-plugged-061b4fb2-6c43-46cc-87f5-b307884088b1 for instance with vm_state building and task_state spawning. [ 731.317549] env[69982]: DEBUG nova.compute.manager [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Received event network-changed-061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 731.317697] env[69982]: DEBUG nova.compute.manager [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Refreshing instance network info cache due to event network-changed-061b4fb2-6c43-46cc-87f5-b307884088b1. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 731.317894] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Acquiring lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.459512] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.501793] env[69982]: DEBUG nova.compute.manager [req-1ee6a3b6-054f-48df-89f5-7fbb4150fe03 req-b23ccd37-b2ff-4df9-81ef-86fead14dc14 service nova] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Received event network-vif-deleted-8e565aa3-376d-4b91-8dac-bc818531956d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 731.502952] env[69982]: DEBUG nova.compute.manager [req-1ee6a3b6-054f-48df-89f5-7fbb4150fe03 req-b23ccd37-b2ff-4df9-81ef-86fead14dc14 service nova] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Received event network-vif-deleted-584c8c37-6f38-42cb-853e-a6a8f1135e96 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 731.594839] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 731.615254] env[69982]: DEBUG nova.network.neutron [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 731.643690] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 731.643690] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.643819] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 731.644089] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.644352] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 731.644677] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 731.644984] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 731.645307] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 731.645611] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 731.645924] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 731.646321] env[69982]: DEBUG nova.virt.hardware [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 731.648575] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f7c34c-79ed-4c15-8f18-c87f9d03a30b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.662049] env[69982]: DEBUG oslo_vmware.api [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864273, 'name': PowerOnVM_Task, 'duration_secs': 0.838045} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.664494] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 731.664759] env[69982]: INFO nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Took 10.60 seconds to spawn the instance on the hypervisor. 
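The nova.virt.hardware entries above walk from the flavor/image limits (0:0:0, i.e. unconstrained, capped at 65536 per dimension) to "Build topologies for 1 vcpu(s) 1:1:1", and end with a single VirtCPUTopology(cores=1,sockets=1,threads=1). A rough sketch of that enumeration is shown below; it assumes an unconstrained limit simply means "up to the cap" and is an approximation of the reported behaviour, not Nova's implementation.

    # Illustrative sketch only: enumerate (sockets, cores, threads) splits whose
    # product equals the vCPU count, respecting per-dimension maxima.
    from typing import List, NamedTuple

    class CPUTopology(NamedTuple):
        sockets: int
        cores: int
        threads: int

    def possible_topologies(vcpus: int,
                            max_sockets: int = 65536,
                            max_cores: int = 65536,
                            max_threads: int = 65536) -> List[CPUTopology]:
        topologies = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(max_cores, per_socket) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append(CPUTopology(sockets, cores, threads))
        return topologies

    # For the m1.nano flavor in this log (vcpus=1) only one split exists,
    # matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))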
[ 731.664905] env[69982]: DEBUG nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 731.665908] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a65379-056e-42b6-800d-a3c4af5d2a75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.670272] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40abcde9-26f8-4a71-8179-e575f0a03b5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.774934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.775415] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Instance network_info: |[{"id": "061b4fb2-6c43-46cc-87f5-b307884088b1", "address": "fa:16:3e:0c:75:da", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap061b4fb2-6c", "ovs_interfaceid": "061b4fb2-6c43-46cc-87f5-b307884088b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 731.775734] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Acquired lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.775912] env[69982]: DEBUG nova.network.neutron [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Refreshing network info cache for port 061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 731.777958] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:75:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '061b4fb2-6c43-46cc-87f5-b307884088b1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 731.785694] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 731.786945] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 731.787215] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7127d80b-ac5b-4fa1-bedf-13913fc16879 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.812206] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 731.812206] env[69982]: value = "task-3864274" [ 731.812206] env[69982]: _type = "Task" [ 731.812206] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.824565] env[69982]: DEBUG nova.scheduler.client.report [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 731.827180] env[69982]: DEBUG oslo_concurrency.lockutils [req-96b6d6e3-f3fa-41a8-857b-86467056c346 req-f73e8d6f-f8ff-4f15-9918-9c0667ae7f85 service nova] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.827857] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864274, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.963332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.963576] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.203088] env[69982]: INFO nova.compute.manager [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Took 32.65 seconds to build instance. [ 732.308644] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 732.308644] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-829f9d7b-021b-4ca9-8213-f566242984cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.319426] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 732.319426] env[69982]: value = "task-3864275" [ 732.319426] env[69982]: _type = "Task" [ 732.319426] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.328511] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864274, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.329862] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.780s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.330638] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 732.337736] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.445s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.337936] env[69982]: DEBUG nova.objects.instance [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lazy-loading 'resources' on Instance uuid 78ba4fa9-4083-4204-a5b4-88cdcec6ca13 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 732.339414] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.707345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-09c7cf1c-1f04-48ee-8a0b-b2085debe2bc tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.382s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.745388] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Successfully updated port: 5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.824759] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864274, 'name': CreateVM_Task, 'duration_secs': 0.672398} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.829417] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 732.830982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.831459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.831892] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 732.833079] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e741bf6-739c-40f4-a0dd-fa447593c1cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.839854] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864275, 'name': PowerOffVM_Task, 'duration_secs': 0.332368} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.840597] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 732.840968] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 732.842632] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af5307b-4bea-40b6-b8b5-c09d05387f20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.850130] env[69982]: DEBUG nova.compute.utils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 732.853655] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 732.853655] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52458f99-0609-bdc8-852f-a62af9f28d6e" [ 732.853655] env[69982]: _type = "Task" [ 732.853655] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.856839] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 732.857036] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 732.869575] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 732.870400] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55c04add-da7e-41c6-bfad-15ff07ea4e85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.878205] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52458f99-0609-bdc8-852f-a62af9f28d6e, 'name': SearchDatastore_Task, 'duration_secs': 0.031322} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.881265] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.881568] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 732.881816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.881983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.882190] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
732.882796] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe1428e6-ba53-41de-abee-52c9f07e5b17 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.897024] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 732.897024] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 732.897024] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99b84222-24a3-48f9-bd97-269c27e0d1f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.900920] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 732.900920] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f66a2c-9621-e5cc-2b77-d10e673ca036" [ 732.900920] env[69982]: _type = "Task" [ 732.900920] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.915979] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f66a2c-9621-e5cc-2b77-d10e673ca036, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.960628] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 732.960836] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 732.961031] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 732.961453] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-567bb456-33da-42d8-9360-eb6e0bf0012f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.970022] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 732.970022] env[69982]: value = "task-3864277" [ 732.970022] env[69982]: _type = "Task" [ 732.970022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.980760] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864277, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.038600] env[69982]: DEBUG nova.policy [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493da3ee04094ba4ac17893d999ac99e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc588ded27b49d4826535649105aa88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 733.069338] env[69982]: DEBUG nova.network.neutron [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.091196] env[69982]: DEBUG nova.network.neutron [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updated VIF entry in instance network info cache for port 061b4fb2-6c43-46cc-87f5-b307884088b1. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 733.091575] env[69982]: DEBUG nova.network.neutron [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updating instance_info_cache with network_info: [{"id": "061b4fb2-6c43-46cc-87f5-b307884088b1", "address": "fa:16:3e:0c:75:da", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap061b4fb2-6c", "ovs_interfaceid": "061b4fb2-6c43-46cc-87f5-b307884088b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.215735] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 733.249612] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.249854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquired lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.250026] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 733.355028] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.417593] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f66a2c-9621-e5cc-2b77-d10e673ca036, 'name': SearchDatastore_Task, 'duration_secs': 0.017075} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.422982] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-871c83fc-abd8-4806-8d2e-bbb2b8b66193 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.438464] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 733.438464] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52259272-ae15-4b86-6cc0-69d493e69e9e" [ 733.438464] env[69982]: _type = "Task" [ 733.438464] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.452457] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52259272-ae15-4b86-6cc0-69d493e69e9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.483806] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864277, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.492261] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a11c818-87dc-41a8-81ad-8be5a9a6caa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.502476] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9760e00b-2c01-428c-bb18-1f87ea3fd16a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.543219] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6278d93d-921b-4cf2-ae53-547118bd64a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.552231] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb2c7d4-8e6b-49c7-b5dc-d1119bace034 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.574444] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.574671] env[69982]: DEBUG nova.compute.manager [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Inject network info {{(pid=69982) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 733.575195] env[69982]: DEBUG nova.compute.manager [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] network_info to inject: |[{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _inject_network_info 
/opt/stack/nova/nova/compute/manager.py:7738}} [ 733.582913] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfiguring VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 733.583677] env[69982]: DEBUG nova.compute.provider_tree [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 733.586147] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Successfully created port: 7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 733.588787] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7fbce551-3444-449f-aa4b-1c698d1d417a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.604220] env[69982]: DEBUG nova.scheduler.client.report [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 733.607932] env[69982]: DEBUG oslo_concurrency.lockutils [req-eaf8d259-0750-471a-afec-1d559fd6949e req-e9894bc5-383c-4a63-9525-510f5be65837 service nova] Releasing lock "refresh_cache-3e109fff-94bd-41a9-bc43-373143b7fda5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.615435] env[69982]: DEBUG oslo_vmware.api [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 733.615435] env[69982]: value = "task-3864278" [ 733.615435] env[69982]: _type = "Task" [ 733.615435] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.628231] env[69982]: DEBUG oslo_vmware.api [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864278, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.743862] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 733.789162] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 733.950914] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52259272-ae15-4b86-6cc0-69d493e69e9e, 'name': SearchDatastore_Task, 'duration_secs': 0.013702} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.951212] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 733.951496] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3e109fff-94bd-41a9-bc43-373143b7fda5/3e109fff-94bd-41a9-bc43-373143b7fda5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 733.951778] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ec6e5a2-add2-4ab0-bc3b-d987ca393db8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.959574] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 733.959574] env[69982]: value = "task-3864279" [ 733.959574] env[69982]: _type = "Task" [ 733.959574] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.970266] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864279, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.980010] env[69982]: DEBUG nova.network.neutron [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Updating instance_info_cache with network_info: [{"id": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "address": "fa:16:3e:e9:bf:b2", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bd32150-60", "ovs_interfaceid": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.987806] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.610754} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.988341] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 733.988521] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 733.988693] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 734.001399] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Received event network-changed-ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.002018] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Refreshing instance network info cache due to event network-changed-ad716338-99c7-49f2-a530-97e342fab644. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 734.002018] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquiring lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.002018] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquired lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.002188] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Refreshing network info cache for port ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 734.111799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.114353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.097s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.116822] env[69982]: INFO nova.compute.claims [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 734.129297] env[69982]: DEBUG oslo_vmware.api [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864278, 'name': ReconfigVM_Task, 'duration_secs': 0.166948} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.129649] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d06e4551-43df-4828-a9fa-3fc3f3ab497e tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Reconfigured VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 734.135740] env[69982]: INFO nova.scheduler.client.report [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Deleted allocations for instance 78ba4fa9-4083-4204-a5b4-88cdcec6ca13 [ 734.347287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "743a4a52-ce35-4ec1-9286-e0c470e87186" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.347287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.347287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.347287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.347592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.348937] env[69982]: INFO nova.compute.manager [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Terminating instance [ 734.370231] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5
tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 734.401384] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 734.401648] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.401807] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 734.402023] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.406053] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 734.406228] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 734.406485] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 734.406567] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 734.406794]
env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 734.406901] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 734.407087] env[69982]: DEBUG nova.virt.hardware [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 734.408414] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e22c4f3-858c-4a7b-b27d-cc035628d4f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.418389] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be354f4-0723-4252-a2f1-b624a2eaa1bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.473794] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864279, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.490462] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Releasing lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.494026] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Instance network_info: |[{"id": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "address": "fa:16:3e:e9:bf:b2", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bd32150-60", "ovs_interfaceid": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 734.494228] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:bf:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bd32150-607c-4d0c-8e66-e2d5014aca93', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.502545] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Creating folder: Project (e1aa28abe6634d73af73c67ff6ef8635). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.507874] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-935fed88-0ab7-41f9-ab33-280b7b746bf2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.528447] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Created folder: Project (e1aa28abe6634d73af73c67ff6ef8635) in parent group-v767796. [ 734.528447] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Creating folder: Instances. Parent ref: group-v767880. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 734.528447] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a544545-30ac-47fc-821b-fdce0783f59e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.539020] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Created folder: Instances in parent group-v767880. [ 734.539334] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 734.539711] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.540087] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6f7fe58-80e1-45fa-bd96-27543490a353 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.565619] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.565619] env[69982]: value = "task-3864282" [ 734.565619] env[69982]: _type = "Task" [ 734.565619] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.575664] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864282, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.647085] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ec0e902-e3cb-49d3-be52-21193c511d4f tempest-ServerDiagnosticsTest-1455254523 tempest-ServerDiagnosticsTest-1455254523-project-member] Lock "78ba4fa9-4083-4204-a5b4-88cdcec6ca13" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 26.265s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.751455] env[69982]: DEBUG nova.compute.manager [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Received event network-vif-plugged-5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.751730] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Acquiring lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.752240] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.752486] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 734.752669] env[69982]: DEBUG nova.compute.manager [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] No waiting events found dispatching network-vif-plugged-5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 734.752835]
env[69982]: WARNING nova.compute.manager [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Received unexpected event network-vif-plugged-5bd32150-607c-4d0c-8e66-e2d5014aca93 for instance with vm_state building and task_state spawning. [ 734.752994] env[69982]: DEBUG nova.compute.manager [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Received event network-changed-5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.753166] env[69982]: DEBUG nova.compute.manager [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Refreshing instance network info cache due to event network-changed-5bd32150-607c-4d0c-8e66-e2d5014aca93. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 734.753369] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Acquiring lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.753502] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Acquired lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.753678] env[69982]: DEBUG nova.network.neutron [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Refreshing network info cache for port 5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 734.853886] env[69982]: DEBUG nova.compute.manager [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 734.854176] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 734.855834] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc098dd-2488-4379-a0cb-5acf4e5719e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.864900] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 734.865321] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a497eb51-b929-4213-9e2c-0ecfa114275a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.874246] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 734.874246] env[69982]: value = "task-3864283" [ 734.874246] env[69982]: _type = "Task" [ 734.874246] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.897055] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864283, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.913482] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updated VIF entry in instance network info cache for port ad716338-99c7-49f2-a530-97e342fab644. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 734.916016] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updating instance_info_cache with network_info: [{"id": "ad716338-99c7-49f2-a530-97e342fab644", "address": "fa:16:3e:5a:b3:6d", "network": {"id": "817d38e9-0594-4e2f-8145-2d18050b1e49", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-184903772-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.169", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ce3af012c140a08fb2b992dacd15aa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad716338-99", "ovs_interfaceid": "ad716338-99c7-49f2-a530-97e342fab644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.976308] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864279, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575972} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.976537] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3e109fff-94bd-41a9-bc43-373143b7fda5/3e109fff-94bd-41a9-bc43-373143b7fda5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 734.976770] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 734.977050] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e5a7387-ce6d-457c-af57-1adbfdee0339 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.987628] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 734.987628] env[69982]: value = "task-3864284" [ 734.987628] env[69982]: _type = "Task" [ 734.987628] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.998213] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864284, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.053064] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 735.053064] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.053064] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 735.053837] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.054373] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 735.054594] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 735.054921] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 735.055660] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
735.055954] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 735.056242] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 735.056554] env[69982]: DEBUG nova.virt.hardware [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 735.057817] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43837c5-6d48-41f5-b3fc-237dda71db4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.078152] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005a1e1c-8b2c-4a53-be72-fa256d3a40bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.089270] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864282, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.100832] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:a1:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 735.109900] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 735.110134] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 735.110399] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c964d06e-5ef7-40aa-a9e8-22d2728ff79a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.136596] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 735.136849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.143253] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 735.143253] env[69982]: value = "task-3864285" [ 735.143253] env[69982]: _type = "Task" [ 735.143253] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.156051] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864285, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.388020] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864283, 'name': PowerOffVM_Task, 'duration_secs': 0.235037} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.388020] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 735.388020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 735.388020] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b362b036-d76d-47c2-9615-9605f54c7217 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.420140] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Releasing lock "refresh_cache-5743a020-0c09-45ec-aca4-5ce367cc201a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.420140] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 735.420279] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing instance network info cache due to event network-changed-bbcc00a3-079c-427d-9966-920e8614cd44. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 735.420484] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquiring lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.420624] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquired lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.420781] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Refreshing network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 735.475567] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 735.475567] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 735.476353] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Deleting the datastore file [datastore2] 743a4a52-ce35-4ec1-9286-e0c470e87186 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 735.476682] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54259a1d-c387-45e0-8d09-8d8198a0804e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.487519] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for the task: (returnval){ [ 735.487519] env[69982]: value = "task-3864287" [ 735.487519] env[69982]: _type = "Task" [ 735.487519] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.505971] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864284, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.22952} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.510430] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 735.511091] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864287, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.511842] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc7afb8-a00a-4f2a-8b92-bf6cc4951793 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.542644] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 3e109fff-94bd-41a9-bc43-373143b7fda5/3e109fff-94bd-41a9-bc43-373143b7fda5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 735.548482] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b5d4c86-5813-4512-b62e-b33e52b4c274 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.565098] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Successfully updated port: 7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 735.567220] env[69982]: DEBUG nova.network.neutron [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Updated VIF entry in instance network info cache for port 5bd32150-607c-4d0c-8e66-e2d5014aca93. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 735.567612] env[69982]: DEBUG nova.network.neutron [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Updating instance_info_cache with network_info: [{"id": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "address": "fa:16:3e:e9:bf:b2", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bd32150-60", "ovs_interfaceid": "5bd32150-607c-4d0c-8e66-e2d5014aca93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.576553] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 735.576553] env[69982]: value = "task-3864288" [ 735.576553] env[69982]: _type = "Task" [ 735.576553] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.586423] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864282, 'name': CreateVM_Task, 'duration_secs': 0.561326} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.589726] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.591139] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.591379] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.592247] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 735.596155] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e3beaf2-dee6-4613-9092-d8f4a24dc8d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.598520] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.602632] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 735.602632] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528e49f5-aefb-a556-b2a1-740e61d10a94" [ 735.602632] env[69982]: _type = "Task" [ 735.602632] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.620815] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528e49f5-aefb-a556-b2a1-740e61d10a94, 'name': SearchDatastore_Task, 'duration_secs': 0.010035} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.621581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.621581] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.621806] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.621837] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.622122] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.622364] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-039857e7-1475-4f46-b454-4790ea4876c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.637760] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.637996] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.638808] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccdfe6df-4db0-4de4-9e11-1f95f67bdba1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.651624] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 735.651624] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5270e3d3-e318-efbd-31ca-bb6edafbb1dc" [ 735.651624] env[69982]: _type = "Task" [ 735.651624] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.658761] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864285, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.668459] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5270e3d3-e318-efbd-31ca-bb6edafbb1dc, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.669640] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14bf6a1c-8b63-4322-b928-c54d13d6957d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.680718] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 735.680718] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bf857c-fa86-6d2c-25b2-630de509f60c" [ 735.680718] env[69982]: _type = "Task" [ 735.680718] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.689977] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bf857c-fa86-6d2c-25b2-630de509f60c, 'name': SearchDatastore_Task, 'duration_secs': 0.010561} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.692846] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.693010] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7/bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.694241] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-960b75af-fa33-4e3c-93f0-29f6bd86989c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.700326] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 735.700326] env[69982]: value = "task-3864289" [ 735.700326] env[69982]: _type = "Task" [ 735.700326] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.711039] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864289, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.764141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d3af4b-2303-430b-86ca-a5fb4f5f3823 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.773915] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd47488-8e61-425d-9565-48b1d984ae4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.802526] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb73b07c-8425-4d01-a273-bcd7fbb76c74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.810714] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3984dbd-8fef-4028-a6d4-473aa7972a51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.827027] env[69982]: DEBUG nova.compute.provider_tree [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.999402] env[69982]: DEBUG oslo_vmware.api [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Task: {'id': task-3864287, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187397} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.999600] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 735.999852] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 736.000108] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 736.000332] env[69982]: INFO nova.compute.manager [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 736.000646] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 736.000915] env[69982]: DEBUG nova.compute.manager [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 736.001031] env[69982]: DEBUG nova.network.neutron [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 736.071084] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.071084] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.071084] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 736.077097] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4c3cbeb-cc18-4d07-a533-5539ce8bab8a req-6167f79f-c2fb-441a-8b88-6f458e1193a6 service nova] Releasing lock "refresh_cache-bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.102362] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.162444] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864285, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.219815] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864289, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.333445] env[69982]: DEBUG nova.scheduler.client.report [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 736.364972] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updated VIF entry in instance network info cache for port bbcc00a3-079c-427d-9966-920e8614cd44. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 736.364972] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [{"id": "bbcc00a3-079c-427d-9966-920e8614cd44", "address": "fa:16:3e:75:34:19", "network": {"id": "313bbe16-3b51-4604-bab6-bc77e5fd0593", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1442966602-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aeecb3137ebc4b5fa087ef207104755c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbbcc00a3-07", "ovs_interfaceid": "bbcc00a3-079c-427d-9966-920e8614cd44", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.593567] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.635161] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 736.660108] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864285, 'name': CreateVM_Task, 'duration_secs': 1.446656} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.660472] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 736.661295] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.661592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.662058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 736.662463] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e9561b3-32ad-4900-860e-cbbba77ea44a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.668580] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 736.668580] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5285a061-a0bd-b0de-71d0-383ad9428408" [ 736.668580] env[69982]: _type = "Task" [ 736.668580] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.678437] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5285a061-a0bd-b0de-71d0-383ad9428408, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.715267] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864289, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.844276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.844276] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 736.849337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.747s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.849769] env[69982]: DEBUG nova.objects.instance [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lazy-loading 'resources' on Instance uuid 9d1b0a5f-e096-4641-a077-f0949135efbb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 736.869111] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Releasing lock "refresh_cache-743a4a52-ce35-4ec1-9286-e0c470e87186" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.869111] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 736.869111] env[69982]: DEBUG nova.compute.manager [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing instance network info cache due to event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 736.869111] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.869111] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.869391] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 736.883028] env[69982]: DEBUG nova.network.neutron [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Updating instance_info_cache with network_info: [{"id": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "address": "fa:16:3e:2a:06:fe", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2fed4f-ea", "ovs_interfaceid": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.096512] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864288, 'name': ReconfigVM_Task, 'duration_secs': 1.262668} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.096512] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 3e109fff-94bd-41a9-bc43-373143b7fda5/3e109fff-94bd-41a9-bc43-373143b7fda5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 737.097160] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f33865b8-0ea6-4fd4-b442-0eed5f521402 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.105351] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 737.105351] env[69982]: value = "task-3864290" [ 737.105351] env[69982]: _type = "Task" [ 737.105351] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.114531] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864290, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.180535] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5285a061-a0bd-b0de-71d0-383ad9428408, 'name': SearchDatastore_Task, 'duration_secs': 0.018368} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.181153] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.181153] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 737.181329] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.181480] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.181650] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 737.181949] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e8b1251-7e3d-4810-9375-9fac67fd14a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.191918] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 737.192170] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 737.193051] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e5533f9-6442-4d4a-b4ca-d5644807296d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.199999] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 737.199999] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b3085e-1fd6-21a0-6f9a-01bfd869dad9" [ 737.199999] env[69982]: _type = "Task" [ 737.199999] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.217295] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b3085e-1fd6-21a0-6f9a-01bfd869dad9, 'name': SearchDatastore_Task, 'duration_secs': 0.010525} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.223238] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864289, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.223238] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a16290b-42dc-49f3-9ec6-798b4151137f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.230883] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 737.230883] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ffa5de-f17d-d5e2-66fb-3e35646a2c50" [ 737.230883] env[69982]: _type = "Task" [ 737.230883] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.247954] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ffa5de-f17d-d5e2-66fb-3e35646a2c50, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.248124] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.248470] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 737.248801] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea26881c-2e28-498f-96b8-594bb0a6fdfb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.257556] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 737.257556] env[69982]: value = "task-3864291" [ 737.257556] env[69982]: _type = "Task" [ 737.257556] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.267159] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.317276] env[69982]: DEBUG nova.network.neutron [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.354112] env[69982]: DEBUG nova.compute.utils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 737.361697] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Not allocating networking since 'none' was specified. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 737.386154] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 737.386560] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Instance network_info: |[{"id": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "address": "fa:16:3e:2a:06:fe", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2fed4f-ea", "ovs_interfaceid": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 737.390302] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:06:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31a7f15-a808-4199-9071-31fd05e316ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c2fed4f-eaa2-49d4-9df7-62d75b592224', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 737.399033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating folder: Project (9cc588ded27b49d4826535649105aa88). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.400253] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60fbbdc5-8aae-4192-93d0-dddd67cb46ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.411752] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created folder: Project (9cc588ded27b49d4826535649105aa88) in parent group-v767796. 
[ 737.412033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating folder: Instances. Parent ref: group-v767884. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 737.412311] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2802cb28-9e1b-4a23-8aa9-0a12db3fc0d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.430088] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created folder: Instances in parent group-v767884. [ 737.430492] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 737.430692] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 737.430964] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-897c0f43-65cb-4901-9dab-c99ab47b8344 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.463705] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.464841] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing instance network info cache due to event network-changed-6db4163b-49c9-41e1-bfbb-098d83fd379d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.464841] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Acquiring lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.471620] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 737.471620] env[69982]: value = "task-3864294" [ 737.471620] env[69982]: _type = "Task" [ 737.471620] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.487190] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864294, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.576312] env[69982]: DEBUG nova.compute.manager [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received event network-changed-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.576508] env[69982]: DEBUG nova.compute.manager [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing instance network info cache due to event network-changed-aead0d1e-bc05-4064-8494-624226340060. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.580030] env[69982]: DEBUG oslo_concurrency.lockutils [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] Acquiring lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.580030] env[69982]: DEBUG oslo_concurrency.lockutils [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] Acquired lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 737.580030] env[69982]: DEBUG nova.network.neutron [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing network info cache for port aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 737.617409] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864290, 'name': Rename_Task, 'duration_secs': 0.149339} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.618819] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.618819] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffeceadb-7492-46f7-8afc-760a2cd31b11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.628268] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 737.628268] env[69982]: value = "task-3864295" [ 737.628268] env[69982]: _type = "Task" [ 737.628268] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.643873] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.723021] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864289, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.554772} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.723021] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7/bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.723021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.723021] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f8e9364-f0a5-413a-8750-0e6c99cf8e86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.731665] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 737.731665] env[69982]: value = "task-3864296" [ 737.731665] env[69982]: _type = "Task" [ 737.731665] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.748276] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864296, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.755818] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updated VIF entry in instance network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 737.756393] env[69982]: DEBUG nova.network.neutron [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.777268] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864291, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.822260] env[69982]: INFO nova.compute.manager [-] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Took 1.82 seconds to deallocate network for instance. [ 737.866021] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 737.983406] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864294, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.082735] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac43f1aa-1f40-45e8-9fcf-398639747d4c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.091194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "ebd9e006-a591-44f7-867c-041731b9d45a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.091194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.091385] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.091651] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.091828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.094552] env[69982]: INFO nova.compute.manager [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Terminating instance [ 738.105027] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5565a11a-0266-45d0-b708-8d9dc0acc42c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.144712] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5eedefe-4e5b-414e-b088-3c4f3f7f3c47 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.154328] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864295, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.161098] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669a3c0b-6ca1-49ce-9fa2-d4ff7ca4bc07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.178037] env[69982]: DEBUG nova.compute.provider_tree [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.244901] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.25308} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.245217] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.246193] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806ec000-1220-46cc-90a9-1806034d88a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.265784] env[69982]: DEBUG oslo_concurrency.lockutils [req-5f0e8ced-2f22-47de-b1e2-52604bf41c68 req-81137b97-6ca2-490e-a8d5-2099c2178dc4 service nova] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.275196] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7/bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.275563] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Acquired lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.275749] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 
ebd9e006-a591-44f7-867c-041731b9d45a] Refreshing network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 738.280373] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b88c346-6117-4c18-895c-6d073ecbb988 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.306513] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544202} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.307915] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 738.308151] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 738.308482] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 738.308482] env[69982]: value = "task-3864297" [ 738.308482] env[69982]: _type = "Task" [ 738.308482] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.308684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56ecc75c-87ad-4d0a-a9a3-4a12f938bf2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.320288] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864297, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.321768] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 738.321768] env[69982]: value = "task-3864298" [ 738.321768] env[69982]: _type = "Task" [ 738.321768] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.340205] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.484504] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864294, 'name': CreateVM_Task, 'duration_secs': 0.705359} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.484702] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 738.489043] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.489231] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.489603] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 738.489890] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7f8a0ca-b788-41a6-8dd2-d8c3f2973186 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.495869] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 738.495869] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6b069-aa33-4cc2-db09-d08028de3f10" [ 738.495869] env[69982]: _type = "Task" [ 738.495869] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.508336] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6b069-aa33-4cc2-db09-d08028de3f10, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.608863] env[69982]: DEBUG nova.compute.manager [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.609098] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.610102] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1332226-30b0-44a3-8b27-115edcb8e7e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.618370] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.618712] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88d912e1-d877-4ca1-a779-2250de7527b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.629033] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 738.629033] env[69982]: value = "task-3864299" [ 738.629033] env[69982]: _type = "Task" [ 738.629033] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.638118] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864299, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.659460] env[69982]: DEBUG oslo_vmware.api [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864295, 'name': PowerOnVM_Task, 'duration_secs': 0.691451} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.659460] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.660816] env[69982]: INFO nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Took 9.83 seconds to spawn the instance on the hypervisor. [ 738.660816] env[69982]: DEBUG nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.661634] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9749e1b5-a3fe-4777-b355-80f2017fbe1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.682707] env[69982]: DEBUG nova.scheduler.client.report [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 738.828612] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864297, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.842217] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086419} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.843050] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 738.843860] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3f25b8-2ff3-48a8-b986-15853722a30a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.871470] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 738.871763] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0b988f5-9d96-47e8-b64b-a925391c0e03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.888127] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 738.896807] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 738.896807] env[69982]: value = "task-3864300" [ 738.896807] env[69982]: _type = "Task" [ 738.896807] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.910842] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864300, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.930305] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 738.930572] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.930782] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 738.931032] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.931191] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 738.931340] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 738.931572] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 738.931785] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 738.931992] env[69982]: DEBUG nova.virt.hardware [None 
req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 738.932192] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 738.932372] env[69982]: DEBUG nova.virt.hardware [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 738.933285] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9b43eb0-af1a-4e0d-ab27-aaf280e3de3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.945241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02193772-04ad-4716-b7c1-eb15df53b46b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.960965] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 738.966715] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Creating folder: Project (800a3b6245374b219a71d4704d7170bf). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.970193] env[69982]: DEBUG nova.network.neutron [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updated VIF entry in instance network info cache for port aead0d1e-bc05-4064-8494-624226340060. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 738.970566] env[69982]: DEBUG nova.network.neutron [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [{"id": "aead0d1e-bc05-4064-8494-624226340060", "address": "fa:16:3e:3c:80:7e", "network": {"id": "d085c3c7-14e1-4907-8ba4-c86bdcc6d4d9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-258973916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75340e5596304ad38d40e450a5425cba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563", "external-id": "nsx-vlan-transportzone-931", "segmentation_id": 931, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaead0d1e-bc", "ovs_interfaceid": "aead0d1e-bc05-4064-8494-624226340060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.971829] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ec94dc9-4ae5-4963-9d3d-217e16a7d9f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.985947] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Created folder: Project (800a3b6245374b219a71d4704d7170bf) in parent group-v767796. [ 738.986233] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Creating folder: Instances. Parent ref: group-v767887. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 738.987264] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b424a7a-6b9c-46f6-8672-e68b3f573686 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.001693] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Created folder: Instances in parent group-v767887. [ 739.001990] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.002673] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.002796] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9b65c331-0dc5-4b5f-aca5-189f42dfd1dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.021434] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6b069-aa33-4cc2-db09-d08028de3f10, 'name': SearchDatastore_Task, 'duration_secs': 0.011177} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.022285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.022584] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.022893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.023783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.023783] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.023783] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61752803-e58a-4e90-97b9-7153c843f218 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.027376] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.027376] env[69982]: value = "task-3864303" [ 739.027376] env[69982]: _type = "Task" [ 739.027376] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.033181] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.033472] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.037623] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a3c7cf5-cca2-4610-a778-37afdd1a40ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.040479] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864303, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.044247] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 739.044247] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525877e9-afd6-88e3-9565-79978761de61" [ 739.044247] env[69982]: _type = "Task" [ 739.044247] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.053356] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525877e9-afd6-88e3-9565-79978761de61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.139886] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864299, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.181225] env[69982]: INFO nova.compute.manager [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Took 33.88 seconds to build instance. 
[ 739.193731] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.196824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.110s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.199828] env[69982]: INFO nova.compute.claims [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 739.226907] env[69982]: INFO nova.scheduler.client.report [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Deleted allocations for instance 9d1b0a5f-e096-4641-a077-f0949135efbb [ 739.328283] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864297, 'name': ReconfigVM_Task, 'duration_secs': 1.018476} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.328709] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Reconfigured VM instance instance-0000001d to attach disk [datastore1] bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7/bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.333031] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7847b2b7-d3c5-4f84-bffb-30a51095d894 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.338494] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 739.338494] env[69982]: value = "task-3864304" [ 739.338494] env[69982]: _type = "Task" [ 739.338494] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.348616] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864304, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.349696] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updated VIF entry in instance network info cache for port 6db4163b-49c9-41e1-bfbb-098d83fd379d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 739.350085] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [{"id": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "address": "fa:16:3e:d0:34:2f", "network": {"id": "acf7fb63-043f-448a-a0e2-7ef2f14d6e3e", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-660342883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "20ab85756df94226800ca6a415d05d7b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b7a73c01-1bb9-4612-a1a7-16d71b732e81", "external-id": "nsx-vlan-transportzone-711", "segmentation_id": 711, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6db4163b-49", "ovs_interfaceid": "6db4163b-49c9-41e1-bfbb-098d83fd379d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.412212] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864300, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.475834] env[69982]: DEBUG oslo_concurrency.lockutils [req-d623d041-6a6d-4ae0-a986-a0049b194dde req-40618b08-a055-4f55-84d2-131c93b21cd0 service nova] Releasing lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.539819] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864303, 'name': CreateVM_Task, 'duration_secs': 0.392423} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.539975] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 739.540445] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.540642] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.541037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 739.541686] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b80d40f5-0a2f-45eb-bba9-1cb6d272eaa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.549771] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 739.549771] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b11ba2-3470-e404-3b5b-2afd5ecb9ce9" [ 739.549771] env[69982]: _type = "Task" [ 739.549771] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.558800] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525877e9-afd6-88e3-9565-79978761de61, 'name': SearchDatastore_Task, 'duration_secs': 0.013251} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.560364] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00bc818b-eaa1-410d-bc01-26a359b451c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.567203] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b11ba2-3470-e404-3b5b-2afd5ecb9ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.011772} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.567941] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.568191] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 739.568435] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.568587] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.568765] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 739.569054] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad912aa7-bcf3-4658-b71a-3908950ef181 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.572843] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 739.572843] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527c8f07-ab40-d6a9-9fb1-3bca7dfeb6ec" [ 739.572843] env[69982]: _type = "Task" [ 739.572843] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.590021] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527c8f07-ab40-d6a9-9fb1-3bca7dfeb6ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.595159] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 739.595159] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 739.595159] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8da2cc8c-ae00-4292-b2b9-7d8592532145 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.602917] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 739.602917] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be23c6-683d-8292-92a5-cb076730a634" [ 739.602917] env[69982]: _type = "Task" [ 739.602917] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.618771] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be23c6-683d-8292-92a5-cb076730a634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.642994] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864299, 'name': PowerOffVM_Task, 'duration_secs': 0.532263} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.643503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 739.644883] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 739.644883] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68fa62c2-132d-4650-8878-2f02b9fa52af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.684706] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b1db433-58d6-407e-b334-816e4c543153 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.956s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.718762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 739.718762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 739.718762] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Deleting the datastore file [datastore2] ebd9e006-a591-44f7-867c-041731b9d45a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 739.718762] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88941515-90a4-446b-a004-3ebdf676c821 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.726419] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for the task: (returnval){ [ 739.726419] env[69982]: value = "task-3864306" [ 739.726419] env[69982]: _type = "Task" [ 739.726419] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.738320] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864306, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.738998] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1cfef308-2c64-428c-88f4-eaaaedd2e89a tempest-DeleteServersAdminTestJSON-1704192969 tempest-DeleteServersAdminTestJSON-1704192969-project-member] Lock "9d1b0a5f-e096-4641-a077-f0949135efbb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.684s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.905974] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Releasing lock "refresh_cache-ebd9e006-a591-44f7-867c-041731b9d45a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.905974] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Received event network-vif-plugged-7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.905974] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Acquiring lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.905974] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.905974] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.911383] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] No waiting events found dispatching network-vif-plugged-7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.911383] env[69982]: WARNING nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Received unexpected event network-vif-plugged-7c2fed4f-eaa2-49d4-9df7-62d75b592224 for instance with vm_state building 
and task_state spawning. [ 739.911383] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Received event network-changed-7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.911383] env[69982]: DEBUG nova.compute.manager [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Refreshing instance network info cache due to event network-changed-7c2fed4f-eaa2-49d4-9df7-62d75b592224. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 739.911383] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Acquiring lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.912017] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Acquired lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 739.912017] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Refreshing network info cache for port 7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 739.912017] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864304, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.912017] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864300, 'name': ReconfigVM_Task, 'duration_secs': 0.565082} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.912328] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Reconfigured VM instance instance-00000019 to attach disk [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63/f9124657-d1c5-4a93-9d4a-3b06ca60ec63.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 739.912328] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c74d56a-0d5e-4f0f-8d95-4de4bcf92971 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.917876] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 739.917876] env[69982]: value = "task-3864307" [ 739.917876] env[69982]: _type = "Task" [ 739.917876] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.928511] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864307, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.982428] env[69982]: DEBUG nova.compute.manager [req-418a51b7-431f-4c3c-80db-02a447eac6f6 req-8e8d8794-b029-4523-8a56-d77c7a9f7580 service nova] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Received event network-vif-deleted-bbcc00a3-079c-427d-9966-920e8614cd44 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 740.084988] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527c8f07-ab40-d6a9-9fb1-3bca7dfeb6ec, 'name': SearchDatastore_Task, 'duration_secs': 0.015743} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.084988] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.085341] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 25957956-0d50-4b4f-8e5c-f55a1e182235/25957956-0d50-4b4f-8e5c-f55a1e182235.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.085491] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b225c599-022f-4d7a-b6b5-2157305e2221 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.094839] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 740.094839] env[69982]: value = "task-3864308" [ 740.094839] env[69982]: _type = "Task" [ 740.094839] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.106022] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.117025] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be23c6-683d-8292-92a5-cb076730a634, 'name': SearchDatastore_Task, 'duration_secs': 0.011748} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.118195] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580b1955-2e34-4adb-af31-12634e26a9c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.124789] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 740.124789] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529f59da-b364-9cff-8d16-6a8f38d77c0d" [ 740.124789] env[69982]: _type = "Task" [ 740.124789] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.139490] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529f59da-b364-9cff-8d16-6a8f38d77c0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.188911] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 740.238606] env[69982]: DEBUG oslo_vmware.api [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Task: {'id': task-3864306, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.265386} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.241704] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.245384] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 740.245384] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.245384] env[69982]: INFO nova.compute.manager [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Took 1.63 seconds to destroy the instance on the hypervisor. [ 740.245384] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.245384] env[69982]: DEBUG nova.compute.manager [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 740.245649] env[69982]: DEBUG nova.network.neutron [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 740.366544] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864304, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.433104] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864307, 'name': Rename_Task, 'duration_secs': 0.256868} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.436973] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.438223] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-846916f3-1a37-4598-a828-090324d5a60f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.446201] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 740.446201] env[69982]: value = "task-3864309" [ 740.446201] env[69982]: _type = "Task" [ 740.446201] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.456625] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864309, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.608202] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864308, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.645085] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529f59da-b364-9cff-8d16-6a8f38d77c0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012469} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.646217] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.646217] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.646217] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d740fd08-5f6d-49fb-8b35-2d15f0ce11ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.657451] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 740.657451] env[69982]: value = "task-3864310" [ 740.657451] env[69982]: _type = "Task" [ 740.657451] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.671378] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864310, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.716160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.824308] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d696a0ee-c0d3-4f76-b62e-2dfc62bdf120 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.838762] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9454071f-7862-44c4-aa13-131e06b71c8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.880017] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Updated VIF entry in instance network info cache for port 7c2fed4f-eaa2-49d4-9df7-62d75b592224. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 740.880017] env[69982]: DEBUG nova.network.neutron [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Updating instance_info_cache with network_info: [{"id": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "address": "fa:16:3e:2a:06:fe", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c2fed4f-ea", "ovs_interfaceid": "7c2fed4f-eaa2-49d4-9df7-62d75b592224", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.883314] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5be7c2b-a83a-498e-8fea-0cb9f7b4b72d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.893926] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864304, 'name': Rename_Task, 'duration_secs': 1.122601} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.895067] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 740.896405] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4a9f93b3-5210-43fb-ac49-5738382fa1fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.906028] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103c54cd-8952-4aa7-861c-13165d003351 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.911308] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 740.911308] env[69982]: value = "task-3864311" [ 740.911308] env[69982]: _type = "Task" [ 740.911308] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.925866] env[69982]: DEBUG nova.compute.provider_tree [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.932650] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.942621] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.942820] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.958966] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864309, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.111837] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864308, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59182} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.112288] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 25957956-0d50-4b4f-8e5c-f55a1e182235/25957956-0d50-4b4f-8e5c-f55a1e182235.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.112640] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.113126] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d21973fd-5726-4a25-a5e7-fd9ef5a964ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.124809] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 741.124809] env[69982]: value = "task-3864312" [ 741.124809] env[69982]: _type = "Task" [ 741.124809] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.136243] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.174086] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864310, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.189749] env[69982]: DEBUG nova.network.neutron [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.394806] env[69982]: DEBUG oslo_concurrency.lockutils [req-7b6d169e-8fd9-4912-a250-34ae6df34071 req-72fbb252-f96f-4703-b179-eca018de2124 service nova] Releasing lock "refresh_cache-25957956-0d50-4b4f-8e5c-f55a1e182235" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.424537] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.435058] env[69982]: DEBUG nova.scheduler.client.report [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 741.445514] env[69982]: DEBUG nova.compute.utils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 741.463344] env[69982]: DEBUG oslo_vmware.api [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864309, 'name': PowerOnVM_Task, 'duration_secs': 0.658943} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.463344] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.463344] env[69982]: DEBUG nova.compute.manager [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 741.464127] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd7fb0e-3a76-4627-9fe5-25f4c83493c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.638714] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126996} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.638986] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.641218] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea338225-388f-4521-a858-78b6d8cbbcc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.673330] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 25957956-0d50-4b4f-8e5c-f55a1e182235/25957956-0d50-4b4f-8e5c-f55a1e182235.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.678153] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-102c9360-31e7-418b-9866-1588e33e80d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.700411] env[69982]: INFO nova.compute.manager [-] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Took 1.46 seconds to deallocate network for instance. [ 741.710218] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864310, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539079} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.714042] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.714042] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.714042] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 741.714042] env[69982]: value = "task-3864313" [ 741.714042] env[69982]: _type = "Task" [ 741.714042] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.714042] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91369d75-d3ef-47b8-88c6-5b7f96b74c2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.725474] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.727298] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 741.727298] env[69982]: value = "task-3864314" [ 741.727298] env[69982]: _type = "Task" [ 741.727298] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.739495] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864314, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.923503] env[69982]: DEBUG oslo_vmware.api [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864311, 'name': PowerOnVM_Task, 'duration_secs': 0.858786} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.923503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 741.923879] env[69982]: INFO nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Took 10.33 seconds to spawn the instance on the hypervisor. [ 741.923879] env[69982]: DEBUG nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 741.927016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982b2c85-51be-4ec2-813a-b2cdf7831eba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.942898] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.943585] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 741.946244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.203s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.946747] env[69982]: DEBUG nova.objects.instance [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lazy-loading 'resources' on Instance uuid fb6d0f81-0eb1-45aa-a3ad-d3958de582c0 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 741.949006] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.986055] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.213502] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.229921] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864313, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.240134] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089384} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.240458] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 742.241498] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5729431-c68c-498a-a439-2f5686bcefc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.269866] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 742.270684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6537f455-0085-410c-90af-b7bfd74d92f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.296139] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 742.296139] env[69982]: value = "task-3864315" [ 742.296139] env[69982]: _type = "Task" [ 742.296139] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.307809] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864315, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.444721] env[69982]: INFO nova.compute.manager [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Took 34.88 seconds to build instance. [ 742.452289] env[69982]: DEBUG nova.compute.utils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 742.456279] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 742.456490] env[69982]: DEBUG nova.network.neutron [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 742.528278] env[69982]: DEBUG nova.policy [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1623cfaa9bf54f6e9396ad2b3f4c022c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9a8d7388f954868bf53433760a092c4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 742.732711] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864313, 'name': ReconfigVM_Task, 'duration_secs': 0.797808} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.736332] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 25957956-0d50-4b4f-8e5c-f55a1e182235/25957956-0d50-4b4f-8e5c-f55a1e182235.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.737356] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-139fae10-fd45-4acb-9486-a6200b5f1cfd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.747209] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 742.747209] env[69982]: value = "task-3864316" [ 742.747209] env[69982]: _type = "Task" [ 742.747209] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.761718] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864316, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.814671] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864315, 'name': ReconfigVM_Task, 'duration_secs': 0.420969} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.818011] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.818870] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e931f8f6-c383-4c68-82e9-5ec809cbdf33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.827033] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 742.827033] env[69982]: value = "task-3864317" [ 742.827033] env[69982]: _type = "Task" [ 742.827033] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.841455] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864317, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.950273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e8d727d-5863-4d43-9848-644f2927c313 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.104s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 742.959531] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 743.037677] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be46b4c6-9a0d-41bc-b18c-0882964927c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.046268] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e03c6f-4218-44fc-8d82-837e9b6a0523 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.079858] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.080160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.080630] env[69982]: INFO nova.compute.manager [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Attaching volume f3eebfb7-d3e8-424c-86f7-97bb01603ed8 to /dev/sdb [ 743.083080] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3a80ff-d3c9-4d6d-89d2-18849bef5006 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.097196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feecf10-09dc-40f2-928b-564660b94f49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.114760] env[69982]: DEBUG nova.compute.provider_tree [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.135847] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4956f6ba-59c3-45bb-a503-5d5411c50ba3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.144679] env[69982]: DEBUG nova.network.neutron [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Successfully created port: 0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 743.152334] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6744fa37-4d3f-48fd-84c1-72b0ecbb9ccd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.168859] env[69982]: DEBUG nova.virt.block_device [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updating existing volume attachment record: 6d91733e-c0d6-4545-915a-5f2c24e4afd0 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 743.225686] env[69982]: DEBUG nova.compute.manager [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Received event network-vif-deleted-6db4163b-49c9-41e1-bfbb-098d83fd379d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.225894] env[69982]: DEBUG nova.compute.manager [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received event network-changed-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.226453] env[69982]: DEBUG nova.compute.manager [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing instance network info cache due to event network-changed-aead0d1e-bc05-4064-8494-624226340060. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 743.226693] env[69982]: DEBUG oslo_concurrency.lockutils [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] Acquiring lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.226834] env[69982]: DEBUG oslo_concurrency.lockutils [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] Acquired lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.226999] env[69982]: DEBUG nova.network.neutron [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Refreshing network info cache for port aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 743.260087] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864316, 'name': Rename_Task, 'duration_secs': 0.217536} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.260345] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.260494] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bc0631b-70d3-4a90-907d-89a9ca828383 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.272256] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 743.272256] env[69982]: value = "task-3864318" [ 743.272256] env[69982]: _type = "Task" [ 743.272256] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.280973] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.338199] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864317, 'name': Rename_Task, 'duration_secs': 0.182732} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.338499] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 743.338910] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-670d8449-bb52-466b-b19e-4a9d96bf68c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.349313] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 743.349313] env[69982]: value = "task-3864320" [ 743.349313] env[69982]: _type = "Task" [ 743.349313] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.364981] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864320, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.457759] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 743.620836] env[69982]: DEBUG nova.scheduler.client.report [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 743.783693] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864318, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.866198] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864320, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.990803] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 744.012348] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.034094] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 744.034421] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.034511] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 744.034678] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.034822] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 744.034962] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 744.035181] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 744.035339] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 744.035570] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 744.035794] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 744.035976] env[69982]: DEBUG nova.virt.hardware [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 744.036869] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c717afd0-96a2-4013-a970-6a3fbfbfdd6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.048030] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40def029-14e7-473d-a9c2-cf1c88effa03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.131811] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.135376] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.894s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.135739] env[69982]: DEBUG nova.objects.instance [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lazy-loading 'resources' on Instance uuid 6efb0df5-0435-424a-b4cc-1eaefdcf388d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 744.158310] env[69982]: INFO nova.scheduler.client.report [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Deleted allocations for instance 
fb6d0f81-0eb1-45aa-a3ad-d3958de582c0 [ 744.225842] env[69982]: DEBUG nova.network.neutron [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updated VIF entry in instance network info cache for port aead0d1e-bc05-4064-8494-624226340060. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 744.227165] env[69982]: DEBUG nova.network.neutron [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [{"id": "aead0d1e-bc05-4064-8494-624226340060", "address": "fa:16:3e:3c:80:7e", "network": {"id": "d085c3c7-14e1-4907-8ba4-c86bdcc6d4d9", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-258973916-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75340e5596304ad38d40e450a5425cba", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6f493cd8-1cb4-42a1-8d56-bfa6ac7cf563", "external-id": "nsx-vlan-transportzone-931", "segmentation_id": 931, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaead0d1e-bc", "ovs_interfaceid": "aead0d1e-bc05-4064-8494-624226340060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.284745] env[69982]: DEBUG oslo_vmware.api [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864318, 'name': PowerOnVM_Task, 'duration_secs': 0.629526} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.285081] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.285446] env[69982]: INFO nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Took 9.92 seconds to spawn the instance on the hypervisor. 
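The "Task: {'id': task-3864318, 'name': PowerOnVM_Task} progress is 0%/94% ... completed successfully" records above come from oslo.vmware's wait_for_task loop, which keeps asking vCenter for the task's state and logs its progress until the task reaches a terminal state, then reports the duration. The snippet below is a minimal, self-contained sketch of that polling pattern only; it is not the oslo.vmware.api implementation, and FakeTask, its refresh() method, and the poll interval are illustrative assumptions.

import time
from dataclasses import dataclass

# Hypothetical stand-in for a vCenter task handle; the real driver gets a
# task reference back from calls such as PowerOnVM_Task and reads its
# 'info' state via the PropertyCollector. All names here are assumptions.
@dataclass
class FakeTask:
    name: str
    progress: int = 0

    def refresh(self):
        """Simulate vCenter advancing the task a little on each poll."""
        self.progress = min(self.progress + 45, 100)

    @property
    def state(self):
        return "success" if self.progress >= 100 else "running"


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it finishes, logging progress along the way.

    Mirrors the shape of the '_poll_task ... progress is N%' and
    'completed successfully' lines in this log; a simplified sketch,
    not oslo.vmware's wait_for_task itself.
    """
    start = time.monotonic()
    while True:
        task.refresh()
        if task.state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully "
                  f"(duration_secs={duration:.3f})")
            return
        if task.state == "error":
            raise RuntimeError(f"Task {task.name} failed")
        print(f"Task {task.name} progress is {task.progress}%")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))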
[ 744.285700] env[69982]: DEBUG nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.286790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130cc188-dfe8-4c9e-b874-061c50cbfcc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.362335] env[69982]: DEBUG oslo_vmware.api [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864320, 'name': PowerOnVM_Task, 'duration_secs': 0.636825} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.363745] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 744.363745] env[69982]: INFO nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Took 5.47 seconds to spawn the instance on the hypervisor. [ 744.363745] env[69982]: DEBUG nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 744.363962] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fc8dbd-e809-4c9f-81c3-c158286d6bb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.474972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.476013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.476013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.476013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.476013] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.479033] env[69982]: INFO nova.compute.manager [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Terminating instance [ 744.631852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.632595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.632595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.632595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.632829] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.635016] env[69982]: INFO nova.compute.manager [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Terminating instance [ 744.671015] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1e38d2c3-761e-4f00-97f2-04fe98e47242 tempest-InstanceActionsV221TestJSON-1477886705 tempest-InstanceActionsV221TestJSON-1477886705-project-member] Lock "fb6d0f81-0eb1-45aa-a3ad-d3958de582c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.238s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.733366] env[69982]: DEBUG oslo_concurrency.lockutils [req-461fc19f-60a3-4278-9640-3e7fadb8aabd req-8c00c26a-e091-4582-9251-38ee41f36b08 service nova] Releasing lock "refresh_cache-5ba60eb7-ee6f-47e2-b6ca-b54817dab371" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 744.816247] env[69982]: INFO nova.compute.manager [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Took 36.28 seconds to build instance. [ 744.882107] env[69982]: INFO nova.compute.manager [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Took 31.88 seconds to build instance. [ 744.985837] env[69982]: DEBUG nova.compute.manager [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 744.986103] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 744.987064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7610fc1-bcb5-4f20-ac53-db5fd3130b4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.999202] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.000060] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83494fbe-60e5-4be6-bf33-4919b9910a1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.009634] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 745.009634] env[69982]: value = "task-3864323" [ 745.009634] env[69982]: _type = "Task" [ 745.009634] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.025630] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864323, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.144132] env[69982]: DEBUG nova.compute.manager [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 745.144132] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 745.144132] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5597aa0f-7f2c-4c7e-8f7c-80b455c5e732 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.153793] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 745.155458] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-105b8791-40ba-4409-a132-ca25f583dc65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.166325] env[69982]: DEBUG oslo_vmware.api [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 745.166325] env[69982]: value = "task-3864324" [ 745.166325] env[69982]: _type = "Task" [ 745.166325] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.180103] env[69982]: DEBUG oslo_vmware.api [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864324, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.248744] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-041932d8-cadc-4c7a-a4a4-2e867c06c783 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.262888] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ce7493-3208-4a4d-8b93-a217f9ef3851 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.307181] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec6de95-23cd-44c3-a87b-0731e78a2b04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.316887] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f12026-62ef-44b9-a403-83349272daa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.323029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2666f363-4216-43b5-81ca-1a738b10e8d5 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.215s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.339115] env[69982]: DEBUG nova.compute.provider_tree [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.377825] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.378016] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.387296] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b1dea016-84d2-40da-802c-a69aa7a457c5 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.816s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.395496] env[69982]: DEBUG nova.network.neutron [None 
req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Successfully updated port: 0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 745.523081] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864323, 'name': PowerOffVM_Task, 'duration_secs': 0.350658} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.523081] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.523081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.523081] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2509e35d-bdbd-45a1-9b2f-25bd8da3ef1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.582065] env[69982]: DEBUG nova.compute.manager [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Received event network-vif-plugged-0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.582241] env[69982]: DEBUG oslo_concurrency.lockutils [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] Acquiring lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.582511] env[69982]: DEBUG oslo_concurrency.lockutils [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.582756] env[69982]: DEBUG oslo_concurrency.lockutils [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.582944] env[69982]: DEBUG nova.compute.manager [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] No waiting events found dispatching 
network-vif-plugged-0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 745.583224] env[69982]: WARNING nova.compute.manager [req-f817508e-721d-4beb-8c22-7b335a08119a req-9f5f7b85-2fe5-4fd6-89d1-ae5fd348b65f service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Received unexpected event network-vif-plugged-0fd7d67c-3b77-4d0b-ad20-76668f828c54 for instance with vm_state building and task_state spawning. [ 745.584442] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.584442] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.584663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.585255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.585255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.587850] env[69982]: INFO nova.compute.manager [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Terminating instance [ 745.600230] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Unregistered the VM {{(pid=69982) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 745.601040] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 745.601331] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore1] f9124657-d1c5-4a93-9d4a-3b06ca60ec63 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.602366] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ae74511-2875-4bd6-8e72-f7c0efe342e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.616286] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 745.616286] env[69982]: value = "task-3864327" [ 745.616286] env[69982]: _type = "Task" [ 745.616286] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.631851] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.680949] env[69982]: DEBUG oslo_vmware.api [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864324, 'name': PowerOffVM_Task, 'duration_secs': 0.261073} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.681289] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.681970] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.681970] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d13bb0bb-201a-4afd-b44f-0ec603525bbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.725362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.725655] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.761037] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 745.761272] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 745.761610] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Deleting the datastore file [datastore1] bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.762157] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ad980ae-646f-4a62-b19f-8b3c57abbc81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.770806] env[69982]: DEBUG oslo_vmware.api [None 
req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for the task: (returnval){ [ 745.770806] env[69982]: value = "task-3864329" [ 745.770806] env[69982]: _type = "Task" [ 745.770806] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.780424] env[69982]: DEBUG oslo_vmware.api [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.826726] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.847061] env[69982]: DEBUG nova.scheduler.client.report [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.851435] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "25957956-0d50-4b4f-8e5c-f55a1e182235" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.851887] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 745.852186] env[69982]: DEBUG nova.compute.manager [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 745.856681] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e09f52-9a3d-4cd8-a7cb-ec07f1de1270 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.865023] env[69982]: DEBUG nova.compute.manager [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 
25957956-0d50-4b4f-8e5c-f55a1e182235] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 745.865023] env[69982]: DEBUG nova.objects.instance [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lazy-loading 'flavor' on Instance uuid 25957956-0d50-4b4f-8e5c-f55a1e182235 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 745.893574] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 745.903364] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.903558] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquired lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.903745] env[69982]: DEBUG nova.network.neutron [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 746.094759] env[69982]: DEBUG nova.compute.manager [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 746.095049] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 746.095917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efa6c0d-74b1-44dd-8166-cfaa43cad4fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.105661] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 746.105812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d221f8a4-2d66-4070-a84b-381ec91057c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.114235] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 746.114235] env[69982]: value = "task-3864330" [ 746.114235] env[69982]: _type = "Task" [ 746.114235] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.127353] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864330, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.130812] env[69982]: DEBUG oslo_vmware.api [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242044} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.130952] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.131166] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 746.131342] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.131514] env[69982]: INFO nova.compute.manager [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Took 1.15 seconds to destroy the instance on the hypervisor. [ 746.132178] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.132178] env[69982]: DEBUG nova.compute.manager [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 746.132332] env[69982]: DEBUG nova.network.neutron [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 746.282090] env[69982]: DEBUG oslo_vmware.api [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Task: {'id': task-3864329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174836} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.283248] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 746.283248] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 746.283248] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 746.283248] env[69982]: INFO nova.compute.manager [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 746.283481] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.283808] env[69982]: DEBUG nova.compute.manager [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 746.283808] env[69982]: DEBUG nova.network.neutron [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 746.354972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.358447] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.132s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.360178] env[69982]: INFO nova.compute.claims [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.365500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.388660] env[69982]: INFO nova.scheduler.client.report [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Deleted allocations for instance 6efb0df5-0435-424a-b4cc-1eaefdcf388d [ 746.429409] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.483106] env[69982]: DEBUG nova.network.neutron [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 746.494121] env[69982]: INFO nova.compute.manager [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Rebuilding instance [ 746.572218] env[69982]: DEBUG nova.compute.manager [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 746.573884] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b887e85-f6fb-4206-a6b6-ddca73cabcf2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.637833] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864330, 'name': PowerOffVM_Task, 'duration_secs': 0.252325} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.638092] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 746.638269] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 746.638540] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5452bb83-3b08-4707-9b93-0eb7618fe82b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.698789] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "d2684194-a688-4466-9852-1f4ff656f057" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.699471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.712381] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 
tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 746.712558] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 746.712737] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Deleting the datastore file [datastore1] 5ba60eb7-ee6f-47e2-b6ca-b54817dab371 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 746.713040] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a551ae92-2ce2-4334-bf5b-82106d1806ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.722398] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for the task: (returnval){ [ 746.722398] env[69982]: value = "task-3864332" [ 746.722398] env[69982]: _type = "Task" [ 746.722398] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.733544] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864332, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.749128] env[69982]: DEBUG nova.network.neutron [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updating instance_info_cache with network_info: [{"id": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "address": "fa:16:3e:b0:f1:37", "network": {"id": "e8b0aeb2-7651-4e24-a214-20300cf68cbf", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-467279852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9a8d7388f954868bf53433760a092c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fd7d67c-3b", "ovs_interfaceid": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.873274] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 746.873274] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88801838-7853-454d-88bf-6ebe2e06b060 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.884278] env[69982]: DEBUG oslo_vmware.api [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 746.884278] env[69982]: value = "task-3864333" [ 746.884278] env[69982]: _type = "Task" [ 746.884278] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.899596] env[69982]: DEBUG oslo_vmware.api [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864333, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.900593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6d421bda-f2cf-4195-b510-2d78988cfce6 tempest-ServerMetadataTestJSON-1714962535 tempest-ServerMetadataTestJSON-1714962535-project-member] Lock "6efb0df5-0435-424a-b4cc-1eaefdcf388d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.480s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.941294] env[69982]: DEBUG nova.compute.manager [req-9b99b991-78ba-475a-8468-8a66b7658004 req-9e29d9cf-b40d-408e-80f5-73e3d6d425ae service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Received event network-vif-deleted-5bd32150-607c-4d0c-8e66-e2d5014aca93 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.941489] env[69982]: INFO nova.compute.manager [req-9b99b991-78ba-475a-8468-8a66b7658004 req-9e29d9cf-b40d-408e-80f5-73e3d6d425ae service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Neutron deleted interface 5bd32150-607c-4d0c-8e66-e2d5014aca93; detaching it from the instance and deleting it from the info cache [ 746.941667] env[69982]: DEBUG nova.network.neutron [req-9b99b991-78ba-475a-8468-8a66b7658004 req-9e29d9cf-b40d-408e-80f5-73e3d6d425ae service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.134699] env[69982]: DEBUG nova.network.neutron [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.174357] env[69982]: DEBUG nova.network.neutron [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.237035] env[69982]: DEBUG oslo_vmware.api [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Task: {'id': task-3864332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157754} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.237035] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 747.237035] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 747.237035] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 747.237035] env[69982]: INFO nova.compute.manager [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Took 1.14 seconds to destroy the instance on the hypervisor. [ 747.237292] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.237292] env[69982]: DEBUG nova.compute.manager [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 747.237292] env[69982]: DEBUG nova.network.neutron [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 747.252109] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Releasing lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.255013] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Instance network_info: |[{"id": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "address": "fa:16:3e:b0:f1:37", "network": {"id": "e8b0aeb2-7651-4e24-a214-20300cf68cbf", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-467279852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9a8d7388f954868bf53433760a092c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fd7d67c-3b", "ovs_interfaceid": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 747.255164] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:f1:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cfbd1264-be3d-4ca9-953a-df79de7b010b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0fd7d67c-3b77-4d0b-ad20-76668f828c54', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 747.262839] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Creating folder: Project (e9a8d7388f954868bf53433760a092c4). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.263835] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b49634a2-220f-4d35-8e42-f7753d6d6c89 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.279113] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Created folder: Project (e9a8d7388f954868bf53433760a092c4) in parent group-v767796. [ 747.279113] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Creating folder: Instances. Parent ref: group-v767892. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 747.279113] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bfe4fcca-b151-4967-8568-6273c4ebf3dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.291025] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Created folder: Instances in parent group-v767892. [ 747.291025] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 747.291025] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 747.291025] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0733371-e9d6-4d32-b5ac-d369aa73e52a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.313076] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 747.313076] env[69982]: value = "task-3864336" [ 747.313076] env[69982]: _type = "Task" [ 747.313076] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.322452] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864336, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.396961] env[69982]: DEBUG oslo_vmware.api [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864333, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.445911] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8a54944-c1c2-456d-9d71-e01ee04ae202 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.460120] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8249df5-13ec-444e-915c-c213cfef834f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.510979] env[69982]: DEBUG nova.compute.manager [req-9b99b991-78ba-475a-8468-8a66b7658004 req-9e29d9cf-b40d-408e-80f5-73e3d6d425ae service nova] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Detach interface failed, port_id=5bd32150-607c-4d0c-8e66-e2d5014aca93, reason: Instance bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 747.597739] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.599983] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f6a75d4-5e73-4d02-aa8e-5d8fc6f10571 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.610431] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 747.610431] env[69982]: value = "task-3864337" [ 747.610431] env[69982]: _type = "Task" [ 747.610431] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.629197] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864337, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.638358] env[69982]: INFO nova.compute.manager [-] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Took 1.51 seconds to deallocate network for instance. [ 747.680963] env[69982]: INFO nova.compute.manager [-] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Took 1.40 seconds to deallocate network for instance. [ 747.733829] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 747.734101] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767891', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'name': 'volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3e109fff-94bd-41a9-bc43-373143b7fda5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'serial': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 747.735052] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f42ea1-b7af-4507-b325-93c688214790 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.759958] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a78354-fba8-4ce7-b478-7cd36b79f60e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.765083] env[69982]: DEBUG nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Received event network-changed-0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.765287] env[69982]: DEBUG nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Refreshing instance network info cache due to event network-changed-0fd7d67c-3b77-4d0b-ad20-76668f828c54. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 747.765607] env[69982]: DEBUG oslo_concurrency.lockutils [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] Acquiring lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.765689] env[69982]: DEBUG oslo_concurrency.lockutils [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] Acquired lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.765794] env[69982]: DEBUG nova.network.neutron [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Refreshing network info cache for port 0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 747.803260] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8/volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 747.807642] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51c9a462-a67b-4bd9-bfe3-0ec9f7165d63 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.832074] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864336, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.833778] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 747.833778] env[69982]: value = "task-3864338" [ 747.833778] env[69982]: _type = "Task" [ 747.833778] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.846532] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864338, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.896625] env[69982]: DEBUG oslo_vmware.api [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864333, 'name': PowerOffVM_Task, 'duration_secs': 0.705301} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.896921] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 747.897103] env[69982]: DEBUG nova.compute.manager [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 747.899612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1aa77b7-a74f-4b05-83b2-ea2bd9784634 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.083389] env[69982]: DEBUG nova.network.neutron [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.096340] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39773cbd-b4e9-4230-bace-215bdab8c3ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.104923] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a42586-4da2-4fa9-86dd-684951312ca3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.147838] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0d73d3-eafe-4a57-b3a5-dd9c256359c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.151197] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864337, 'name': PowerOffVM_Task, 'duration_secs': 0.18166} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.153064] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.153064] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.153196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751469e0-60a6-4ac2-8d34-d69017ab1244 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.159121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.161296] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833bcab8-5105-4d4b-9e37-fde1ddd1c71d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.168408] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.169202] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57b74250-5fe6-4896-8a22-be9e66fd6456 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.179791] env[69982]: DEBUG nova.compute.provider_tree [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.189076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.200568] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
748.200836] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.201251] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Deleting the datastore file [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.201718] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a51c49a4-c9a2-4621-99d6-79faf88295f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.209537] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 748.209537] env[69982]: value = "task-3864340" [ 748.209537] env[69982]: _type = "Task" [ 748.209537] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.221894] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.334208] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864336, 'name': CreateVM_Task, 'duration_secs': 0.839466} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.334551] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.339183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.339402] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.339895] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 748.340169] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34619b21-fb20-4791-a486-01227a3196c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.353441] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864338, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.353831] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 748.353831] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e2085-bc43-e054-ebd8-c0ac84ac2d30" [ 748.353831] env[69982]: _type = "Task" [ 748.353831] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.365629] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e2085-bc43-e054-ebd8-c0ac84ac2d30, 'name': SearchDatastore_Task, 'duration_secs': 0.014736} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.365629] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.365629] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 748.365824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.365955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.366201] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 748.366467] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a23f166-1092-4b77-a6b6-3f55b9abfcc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.378637] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 748.378996] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 748.379984] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87368054-0a3c-4f7b-a5a7-d2b89bd74b40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.387096] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 748.387096] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5236050e-9a7e-1385-6f12-478450422366" [ 748.387096] env[69982]: _type = "Task" [ 748.387096] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.396435] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5236050e-9a7e-1385-6f12-478450422366, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.412633] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96a79fd4-a623-4d14-919e-b8ee0a80ed61 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.516303] env[69982]: DEBUG nova.network.neutron [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updated VIF entry in instance network info cache for port 0fd7d67c-3b77-4d0b-ad20-76668f828c54. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 748.516700] env[69982]: DEBUG nova.network.neutron [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updating instance_info_cache with network_info: [{"id": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "address": "fa:16:3e:b0:f1:37", "network": {"id": "e8b0aeb2-7651-4e24-a214-20300cf68cbf", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-467279852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9a8d7388f954868bf53433760a092c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fd7d67c-3b", "ovs_interfaceid": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.586579] env[69982]: INFO nova.compute.manager [-] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Took 1.35 seconds to deallocate network for instance. [ 748.683988] env[69982]: DEBUG nova.scheduler.client.report [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.723419] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14305} completed successfully. 
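Annotation: the cache-refresh record above carries a full network_info entry for port 0fd7d67c-3b77-4d0b-ad20-76668f828c54. As a reading aid, here is a minimal sketch (plain Python; the helper name and input file are hypothetical) of pulling the fixed addresses and MTU out of such an entry:

```python
# Sketch: summarize one VIF entry from a Nova instance_info_cache network_info
# list (structure as logged above; summarize_vif and network_info.json are hypothetical).
import json

def summarize_vif(vif: dict) -> dict:
    """Return port id, fixed IPv4 addresses, MTU and VIF type for one entry."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip["type"] == "fixed"
    ]
    return {
        "port_id": vif["id"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"].get("mtu"),
        "vif_type": vif["type"],
    }

if __name__ == "__main__":
    # network_info.json: the JSON list of VIF dicts, one entry shown in the log above.
    with open("network_info.json") as f:
        for vif in json.load(f):
            print(summarize_vif(vif))
```

For the entry logged above this would report port 0fd7d67c-3b77-4d0b-ad20-76668f828c54 with fixed IP 192.168.128.9, MTU 8950 and vif_type "ovs".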
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.723806] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.723921] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 748.724082] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.849011] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864338, 'name': ReconfigVM_Task, 'duration_secs': 0.764637} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.849638] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfigured VM instance instance-0000001c to attach disk [datastore2] volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8/volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.857330] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2e93b2e-53b0-4c61-ae97-c0606ffea6c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.878151] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 748.878151] env[69982]: value = "task-3864341" [ 748.878151] env[69982]: _type = "Task" [ 748.878151] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.890078] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.905639] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5236050e-9a7e-1385-6f12-478450422366, 'name': SearchDatastore_Task, 'duration_secs': 0.013054} completed successfully. 
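Annotation: many of the records in this stretch are oslo.vmware polling a vCenter task to completion ("Waiting for the task ... progress is N% ... completed successfully"). In Nova this is simply VMwareAPISession.wait_for_task(); the sketch below only illustrates the general shape of such a poll loop, with the task-state reader passed in as a hypothetical callable:

```python
# Illustrative poll loop for a vCenter task, in the spirit of the
# "Waiting for the task ... progress is N%" records above.
# read_task_info is a caller-supplied (hypothetical) callable returning a dict
# with 'state' ('queued'/'running'/'success'/'error'), 'progress' and 'result'.
import time

def wait_for_task(task_ref, read_task_info, poll_interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = read_task_info(task_ref)
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_ref} failed: {info.get('error')}")
        # 'queued' or 'running': report progress and poll again, as in the log.
        print(f"Task {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")
```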
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.906697] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5f741ca-8c2f-4b5a-8eed-98d072c7eeba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.918693] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 748.918693] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5279ca90-278a-302d-3b49-bb0df8ab4e4e" [ 748.918693] env[69982]: _type = "Task" [ 748.918693] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.929144] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5279ca90-278a-302d-3b49-bb0df8ab4e4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.020674] env[69982]: DEBUG oslo_concurrency.lockutils [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] Releasing lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.020963] env[69982]: DEBUG nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Received event network-vif-deleted-b4fb0d2a-f1bd-487c-9def-5e6db13a1e3c {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.021225] env[69982]: DEBUG nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Received event network-vif-deleted-aead0d1e-bc05-4064-8494-624226340060 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.021435] env[69982]: INFO nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Neutron deleted interface aead0d1e-bc05-4064-8494-624226340060; detaching it from the instance and deleting it from the info cache [ 749.021652] env[69982]: DEBUG nova.network.neutron [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.100470] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.194020] env[69982]: 
DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.833s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.194020] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.197347] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.321s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.202474] env[69982]: INFO nova.compute.claims [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.393900] env[69982]: DEBUG oslo_vmware.api [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864341, 'name': ReconfigVM_Task, 'duration_secs': 0.153565} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.394481] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767891', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'name': 'volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3e109fff-94bd-41a9-bc43-373143b7fda5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'serial': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 749.434859] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5279ca90-278a-302d-3b49-bb0df8ab4e4e, 'name': SearchDatastore_Task, 'duration_secs': 0.010775} completed successfully. 
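Annotation: the "Attached VMDK" record above logs the vmdk connection_info that was handed to Nova for volume f3eebfb7-d3e8-424c-86f7-97bb01603ed8. A small hypothetical helper that checks the fields shown there and produces a one-line summary of such a dict:

```python
# Sketch: summarize a vmdk connection_info dict like the one logged above.
# The helper and the "required keys" choice are illustrative assumptions.
REQUIRED_DATA_KEYS = {"volume", "volume_id", "name", "access_mode"}

def summarize_vmdk_attachment(connection_info: dict) -> str:
    if connection_info.get("driver_volume_type") != "vmdk":
        raise ValueError("not a vmdk attachment")
    data = connection_info["data"]
    missing = REQUIRED_DATA_KEYS - data.keys()
    if missing:
        raise ValueError(f"connection_info missing keys: {sorted(missing)}")
    return (f"volume {data['volume_id']} ({data['name']}) on {data['volume']}, "
            f"access_mode={data['access_mode']}")
```

For the dict above this yields volume f3eebfb7-d3e8-424c-86f7-97bb01603ed8 on vm-767891 with access_mode=rw.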
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.435275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.435463] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ad0c405f-48c8-4726-8e95-eb83a6e158fe/ad0c405f-48c8-4726-8e95-eb83a6e158fe.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 749.436127] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c738e1d6-6dad-4d0a-a185-5c54829a3725 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.448443] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 749.448443] env[69982]: value = "task-3864342" [ 749.448443] env[69982]: _type = "Task" [ 749.448443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.460154] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.528401] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78ab6983-efd5-43d0-9476-3d426f533d46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.541959] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdbdafe-8ad7-4b72-b60e-0354c232bdf1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.587636] env[69982]: DEBUG nova.compute.manager [req-84949d14-9a32-4e56-b719-5c2fb2c2d18d req-9de36563-826c-4889-9ae9-be75fe1ea8a6 service nova] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Detach interface failed, port_id=aead0d1e-bc05-4064-8494-624226340060, reason: Instance 5ba60eb7-ee6f-47e2-b6ca-b54817dab371 could not be found. 
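Annotation: the SearchDatastore/CopyVirtualDisk records above show the image-cache handshake: the cached base vmdk is looked up while holding a lock named after the image, then copied into the instance directory. A minimal sketch of that copy-if-missing pattern, assuming hypothetical cache/copy helpers; only oslo_concurrency.lockutils.lock is a real API here:

```python
# Copy-if-missing guarded by a per-image lock, as suggested by the
# "[datastore2] devstack-image-cache_base/<image-id>" lock names above.
# cache_has_image, fetch_image_to_cache and copy_cached_image are hypothetical.
from oslo_concurrency import lockutils

def spawn_disk_from_cache(image_id, cache_has_image, fetch_image_to_cache,
                          copy_cached_image, target_path):
    cache_key = f"[datastore2] devstack-image-cache_base/{image_id}"
    # Serialize concurrent spawns of the same image so the base disk is fetched once.
    with lockutils.lock(cache_key):
        if not cache_has_image(image_id):
            fetch_image_to_cache(image_id)
    # With the cached vmdk known to exist, copy it to the instance's own directory.
    copy_cached_image(image_id, target_path)
```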
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 749.707557] env[69982]: DEBUG nova.compute.utils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 749.711540] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 749.712405] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.776050] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 749.776356] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.776586] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 749.776822] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.776989] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 749.777167] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 
tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 749.777380] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 749.777595] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 749.777841] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 749.779075] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 749.779075] env[69982]: DEBUG nova.virt.hardware [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 749.779794] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f175fc2e-58a2-4bd4-91dd-cf31232542d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.790413] env[69982]: DEBUG nova.policy [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b1893ca4db304162ab4b478439603981', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fb13b27f37d4ce4bfb6b1b6c0b74fdc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 749.793791] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ae7033-8199-4403-b95b-d3f2545baadf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.811014] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] 
Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.818626] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.818976] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.819260] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-364810f4-6a1d-4a87-9439-e26940699802 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.838794] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.838794] env[69982]: value = "task-3864343" [ 749.838794] env[69982]: _type = "Task" [ 749.838794] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.850146] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864343, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.960133] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864342, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.149106] env[69982]: DEBUG nova.compute.manager [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 750.150825] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267ad2aa-0c7b-43a7-8578-2937411425bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.223271] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.333607] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Successfully created port: 6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.353242] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864343, 'name': CreateVM_Task, 'duration_secs': 0.437945} completed successfully. 
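Annotation: "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" is the oslo.service looping-call pattern: a function is invoked on a fixed interval until it raises LoopingCallDone, whose return value pops out of .wait(). A generic sketch of that pattern (check_done is a hypothetical work function):

```python
# Generic FixedIntervalLoopingCall usage, as hinted at by the loopingcall
# DEBUG record above; check_done is a hypothetical callable returning None
# until the awaited result is available.
from oslo_service import loopingcall

def wait_until_done(check_done, interval=0.5):
    def _poll():
        result = check_done()
        if result is not None:
            # Stop the loop and hand the value back to .wait().
            raise loopingcall.LoopingCallDone(retvalue=result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()
```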
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.353420] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 750.353842] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.354008] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.354418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 750.354672] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56b3977e-5fec-4718-bb12-75e4ef250884 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.360036] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 750.360036] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521eb9cd-71dc-f8c4-a3b3-c421efa186d2" [ 750.360036] env[69982]: _type = "Task" [ 750.360036] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.376411] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521eb9cd-71dc-f8c4-a3b3-c421efa186d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.460471] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534603} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.463305] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ad0c405f-48c8-4726-8e95-eb83a6e158fe/ad0c405f-48c8-4726-8e95-eb83a6e158fe.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 750.463522] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 750.465255] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51adb086-4b7b-48d6-aabd-05db83e39982 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.467505] env[69982]: DEBUG nova.objects.instance [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'flavor' on Instance uuid 3e109fff-94bd-41a9-bc43-373143b7fda5 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 750.476504] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 750.476504] env[69982]: value = "task-3864344" [ 750.476504] env[69982]: _type = "Task" [ 750.476504] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.494366] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864344, 'name': ExtendVirtualDisk_Task} progress is 0%. 
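Annotation: "Extending root virtual disk to 1048576" is the m1.nano flavor's root_gb=1 expressed in KB, the unit the ExtendVirtualDisk task expects: 1 GiB = 1024 * 1024 KB = 1,048,576 KB. A one-line check (the helper name is illustrative):

```python
# The extend size logged above is the flavor's root disk converted to KB.
def root_gb_to_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024

assert root_gb_to_kb(1) == 1048576  # matches "Extending root virtual disk to 1048576"
```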
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.670601] env[69982]: INFO nova.compute.manager [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] instance snapshotting [ 750.670841] env[69982]: WARNING nova.compute.manager [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 750.676717] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dca77b-7c8c-4304-b778-9eea0efd53eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.701521] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.706140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fe1193-d029-485c-9d24-4d7f66f40990 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.840840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea604585-c982-4893-8101-1f4f0cba3aed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.853595] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da94f42-5c6d-4a37-9cda-489cd49ed293 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.896849] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdd7e89-489c-4164-9c95-f0e90f28363b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.906737] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521eb9cd-71dc-f8c4-a3b3-c421efa186d2, 'name': SearchDatastore_Task, 'duration_secs': 0.013525} completed successfully. 
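Annotation: the WARNING "(state: 4 expected: 1)" above uses Nova's numeric power-state codes: 1 is RUNNING and 4 is SHUTDOWN, i.e. the snapshot was requested against a powered-off instance (which Nova permits, hence only a warning). The mapping below is assumed from nova.compute.power_state; verify it against your tree:

```python
# Decode the numeric codes in messages like "(state: 4 expected: 1)".
# Mapping assumed from nova.compute.power_state.
POWER_STATES = {
    0: "NOSTATE",
    1: "RUNNING",
    3: "PAUSED",
    4: "SHUTDOWN",
    6: "CRASHED",
    7: "SUSPENDED",
}

def decode_state(code: int) -> str:
    return POWER_STATES.get(code, f"UNKNOWN({code})")

print(f"state: {decode_state(4)}, expected: {decode_state(1)}")  # SHUTDOWN vs RUNNING
```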
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.909345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.909602] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.909889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.910055] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.910245] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.910564] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8995df7-386c-492b-addd-67d6904f69f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.913490] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f20ae7-fbb0-4de1-99c2-364801444500 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.932076] env[69982]: DEBUG nova.compute.provider_tree [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.935801] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.935801] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 
tempest-ServersAdmin275Test-1093515878-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 750.936503] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41ccbbc0-6a00-481b-b581-4df1d965b521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.944660] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 750.944660] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52052c78-7360-8109-e6ba-afd1a1c01658" [ 750.944660] env[69982]: _type = "Task" [ 750.944660] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.954599] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52052c78-7360-8109-e6ba-afd1a1c01658, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.972967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-097537bd-446f-4666-8fd4-6b5241a93bd3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.893s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.973982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.273s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.974294] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.974416] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.974583] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.976663] env[69982]: INFO nova.compute.manager [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Terminating instance [ 750.993850] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07811} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.995218] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 750.995899] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-996e404a-4541-4717-bd00-abaa8ab4a6a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.019750] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] ad0c405f-48c8-4726-8e95-eb83a6e158fe/ad0c405f-48c8-4726-8e95-eb83a6e158fe.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.020021] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19397144-3a83-4019-a44e-6dcef97325d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.041169] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 751.041169] env[69982]: value = "task-3864345" [ 751.041169] env[69982]: _type = "Task" [ 751.041169] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.049945] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864345, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.220433] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 751.221399] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-995a0b7c-8b44-4a0f-be2f-c81f328d843c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.229561] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 751.229561] env[69982]: value = "task-3864346" [ 751.229561] env[69982]: _type = "Task" [ 751.229561] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.237906] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.246883] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864346, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.274357] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.278025] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.278025] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.278025] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.278025] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.278025] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.278333] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.278333] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Build topologies for 1 vcpu(s) 1:1:1 
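Annotation: the nova.virt.hardware records above trace CPU-topology selection for the m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets/cores/threads, and for 1 vCPU the only factorization is 1:1:1. A simplified enumerator that reproduces that result (an illustration, not Nova's actual algorithm):

```python
# Simplified CPU-topology enumeration in the spirit of the hardware.py
# records above (not the real nova.virt.hardware implementation).
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(VirtCPUTopology(sockets, cores, threads))
    return found

print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```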
{{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.278333] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.278333] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.278333] env[69982]: DEBUG nova.virt.hardware [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.278488] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cf8b9c-68f7-488b-8a9e-891dd2438923 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.288818] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea4e347-ae5a-4078-ac15-ddba8ee2bc77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.440178] env[69982]: DEBUG nova.scheduler.client.report [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.458322] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52052c78-7360-8109-e6ba-afd1a1c01658, 'name': SearchDatastore_Task, 'duration_secs': 0.043597} completed successfully. 
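Annotation: the scheduler report above repeats the provider's unchanged inventory. Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio, so the figures logged work out to 192 VCPU, 196078 MB of RAM and 400 GB of disk for provider 206a5498-2e79-46c1-a636-9488a05fb67d. A quick check against the logged values:

```python
# Capacity implied by the inventory logged above:
# usable = (total - reserved) * allocation_ratio per resource class.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {usable:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```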
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.460094] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a957a0fa-3871-4865-9e2b-2fdefdb243ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.466410] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 751.466410] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b0588e-92f0-c7fc-6e5b-1e20f747f301" [ 751.466410] env[69982]: _type = "Task" [ 751.466410] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.482528] env[69982]: DEBUG nova.compute.manager [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 751.482528] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 751.483670] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b0588e-92f0-c7fc-6e5b-1e20f747f301, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.483670] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f807b513-8f06-4390-9cdf-f35fc272cc7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.492322] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 751.492322] env[69982]: value = "task-3864347" [ 751.492322] env[69982]: _type = "Task" [ 751.492322] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.502926] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.555258] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864345, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.718342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "fae97132-44b4-4df1-bd34-ba694ea7016a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.718757] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.743381] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864346, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.950727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.951362] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.956727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.055s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.982020] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b0588e-92f0-c7fc-6e5b-1e20f747f301, 'name': SearchDatastore_Task, 'duration_secs': 0.012127} completed successfully. 
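
The lockutils lines above always come in pairs: "Lock X acquired by Y :: waited N s" and later "Lock X "released" by Y :: held M s" (for example compute_resources, held 2.753s by instance_claim, with another caller that blocked 25.055s waiting for it). A small sketch of a context manager that produces the same wait/held timing around an in-process lock; the real oslo.concurrency wrapper also supports external file locks, which this ignores:

    # Sketch of the timing pattern behind the lockutils lines: measure how
    # long the caller waited to acquire a named lock and how long it held it.
    # Mimics the log output only; not oslo.concurrency code.
    import contextlib
    import threading
    import time

    _locks = {}

    @contextlib.contextmanager
    def timed_lock(name, by):
        lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, by, waited))
        try:
            yield
        finally:
            held = time.monotonic() - t0 - waited
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, by, held))

    with timed_lock('compute_resources', 'ResourceTracker.instance_claim'):
        time.sleep(0.01)    # stand-in for the claim work
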
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.982020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.982020] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 751.982020] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d69b7539-c8df-4689-97cb-552ee6147dba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.992595] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 751.992595] env[69982]: value = "task-3864348" [ 751.992595] env[69982]: _type = "Task" [ 751.992595] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.007202] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864348, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.010437] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864347, 'name': PowerOffVM_Task, 'duration_secs': 0.22961} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.010750] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 752.010941] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 752.011160] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767891', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'name': 'volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3e109fff-94bd-41a9-bc43-373143b7fda5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'serial': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 752.015748] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfeed142-2cb7-4251-89db-2e50acf0b11c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.042141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf813939-61f6-43c5-b52d-e4e18c327400 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.061835] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864345, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.062535] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c507885-e24e-430c-9cf1-f0a53114c009 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.087976] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44e230b-8a50-42b0-a5f1-3b95e906f773 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.105696] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] The volume has not been displaced from its original location: [datastore2] volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8/volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8.vmdk. No consolidation needed. 
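
The connection_info dict logged by _detach_volume_vmdk above identifies the shadow VM that backs the volume ('volume': 'vm-767891'), the Cinder volume id, and the access mode. A small sketch that pulls out just those fields ahead of a detach; the dict literal is trimmed from the log entry and the helper name is made up for illustration:

    # Extract the fields the vmdk detach path needs from a connection_info
    # dict like the one in the log above. Sketch only, not Nova code.
    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-767891',   # shadow VM backing the volume
            'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8',
            'name': 'volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8',
            'access_mode': 'rw',
            'encrypted': False,
        },
    }

    def vmdk_detach_params(info):
        if info['driver_volume_type'] != 'vmdk':
            raise ValueError('not a vmdk attachment')
        data = info['data']
        return data['volume'], data['volume_id'], data.get('access_mode', 'rw')

    print(vmdk_detach_params(connection_info))
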
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 752.111158] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfiguring VM instance instance-0000001c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 752.111522] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73f2a8b8-42e4-4f5b-89c8-126888fcfc4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.132993] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 752.132993] env[69982]: value = "task-3864349" [ 752.132993] env[69982]: _type = "Task" [ 752.132993] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.141374] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864349, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.146801] env[69982]: DEBUG nova.compute.manager [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Received event network-vif-plugged-6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 752.146991] env[69982]: DEBUG oslo_concurrency.lockutils [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] Acquiring lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.147200] env[69982]: DEBUG oslo_concurrency.lockutils [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.147362] env[69982]: DEBUG oslo_concurrency.lockutils [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.148267] env[69982]: DEBUG nova.compute.manager [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] No waiting events found dispatching network-vif-plugged-6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 752.148267] 
env[69982]: WARNING nova.compute.manager [req-af0ae35a-a97c-4ced-b2cd-7a493126024c req-3162c04a-7756-4e48-a51f-983e49bfb306 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Received unexpected event network-vif-plugged-6de9b5d7-f6ba-4f07-9293-e372e9466ce1 for instance with vm_state building and task_state spawning. [ 752.224982] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Successfully updated port: 6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 752.242560] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864346, 'name': CreateSnapshot_Task, 'duration_secs': 0.527678} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.243339] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 752.244405] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b84a0a-8714-42fa-bc7a-2afe5292213d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.472644] env[69982]: DEBUG nova.compute.utils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.476326] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.476326] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.501344] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864348, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.556589] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864345, 'name': ReconfigVM_Task, 'duration_secs': 1.393061} completed successfully. 
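
The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." lines above show the compute manager's external-event bookkeeping: an event from Neutron is only delivered if something registered for it first, otherwise it is logged as unexpected and dropped. A simplified model of that registry, assuming threading.Event latches keyed by (instance, event name); not the Nova implementation:

    # Simplified model of external instance event dispatch: waiters register
    # a latch per (instance, event); pop_instance_event returns it if present,
    # otherwise the caller logs the "unexpected event" warning.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._events = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            ev = threading.Event()
            self._events[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            return self._events.pop((instance_uuid, event_name), None)

    events = InstanceEvents()
    waiter = events.pop_instance_event('bd242bac-cd36-4fff-9325-fc14d5ceb566',
                                       'network-vif-plugged-6de9b5d7')
    if waiter is None:
        print('Received unexpected event network-vif-plugged for instance '
              'with vm_state building and task_state spawning.')
    else:
        waiter.set()
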
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.556903] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Reconfigured VM instance instance-00000020 to attach disk [datastore2] ad0c405f-48c8-4726-8e95-eb83a6e158fe/ad0c405f-48c8-4726-8e95-eb83a6e158fe.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.557580] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1da5a56-6bdb-4de9-abb2-19c21dab99a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.566339] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 752.566339] env[69982]: value = "task-3864350" [ 752.566339] env[69982]: _type = "Task" [ 752.566339] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.576347] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864350, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.579303] env[69982]: DEBUG nova.policy [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9865336291ac4440bd18495935352d6c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66125bb452294cea84c97f820c3e94ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.642817] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864349, 'name': ReconfigVM_Task, 'duration_secs': 0.209433} completed successfully. 
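
The "Policy check for network:attach_external_network failed" line above records a request whose credentials carry only the member and reader roles. Assuming the rule for that action is admin-only (an assumption; the deployment's policy files are not in this log), a toy role check shows why it fails. This is not the oslo.policy rule language or engine:

    # Toy illustration of the failed policy check: with an assumed admin-only
    # rule for network:attach_external_network, credentials carrying only the
    # 'member' and 'reader' roles are rejected.
    RULES = {'network:attach_external_network': {'admin'}}   # assumed rule

    def check(rule, creds):
        required = RULES[rule]
        return bool(required & set(creds.get('roles', [])))

    creds = {'user_id': '9865336291ac4440bd18495935352d6c',
             'project_id': '66125bb452294cea84c97f820c3e94ce',
             'roles': ['member', 'reader'], 'is_admin': False}

    print(check('network:attach_external_network', creds))   # False -> check failed
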
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.643189] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Reconfigured VM instance instance-0000001c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 752.648052] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54873117-cd58-4a31-bb4e-a7333ff8518a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.665479] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 752.665479] env[69982]: value = "task-3864351" [ 752.665479] env[69982]: _type = "Task" [ 752.665479] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.675719] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864351, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.730170] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.730340] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquired lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.730478] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 752.769420] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 752.769420] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e3c6c895-64c0-4700-9b7a-8e72d4179894 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.783274] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 
tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 752.783274] env[69982]: value = "task-3864352" [ 752.783274] env[69982]: _type = "Task" [ 752.783274] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.805721] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864352, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.981371] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 753.002543] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864348, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.887801} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.002820] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 753.003047] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 753.005465] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cec0bbe2-863b-44ea-bfcf-a7301fd4f323 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.011549] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 753.011549] env[69982]: value = "task-3864353" [ 753.011549] env[69982]: _type = "Task" [ 753.011549] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.016199] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
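
Each allocation printed by the resource tracker here is roughly {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, and all of them are held against the inventory reported earlier for provider 206a5498-2e79-46c1-a636-9488a05fb67d. A sketch of the capacity rule those numbers must satisfy, summed usage per resource class staying within (total - reserved) * allocation_ratio; the three-instance usage list is made up for illustration:

    # Capacity check sketch: usage summed per resource class must fit within
    # (total - reserved) * allocation_ratio of the provider inventory above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    allocations = [
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
        {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1},
        {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
    ]

    def fits(inventory, allocations):
        for rc, record in inventory.items():
            capacity = (record['total'] - record['reserved']) * record['allocation_ratio']
            used = sum(alloc.get(rc, 0) for alloc in allocations)
            if used > capacity:
                return False
        return True

    print(fits(inventory, allocations))   # True: usage is far below capacity
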
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.020103] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 743a4a52-ce35-4ec1-9286-e0c470e87186 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.020103] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6de35617-22cf-4a32-8651-28ea67532b8f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.020103] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a8217447-bc22-4b84-925f-c3c09fb7228c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.020103] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 303c7ee1-8d26-460b-aab9-d55c71cf8a73 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.021237] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance bba73604-c54f-4643-9e4c-326b643b3d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021237] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 930c8740-5ad1-4491-8dd6-1a568eaa6f62 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021237] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.021237] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 2d554902-bf28-4ee2-b9d6-4219e54246fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021478] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021478] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 589419ea-c609-45bb-bde5-3b22d9ff111e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021478] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ebd9e006-a591-44f7-867c-041731b9d45a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.021478] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021606] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 67f59d53-c61b-48ad-b55d-710595e9dae3 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021606] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance f9124657-d1c5-4a93-9d4a-3b06ca60ec63 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.021606] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 5743a020-0c09-45ec-aca4-5ce367cc201a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021606] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 5ba60eb7-ee6f-47e2-b6ca-b54817dab371 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
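
The resource-tracker block above sorts allocations against this node into three cases: instances it actively manages (allocation kept), instances it does not manage but that still reference this host (heal skipped with a warning), and, a little further down, instances that were scheduled here but have not started yet (heal skipped silently). A compressed sketch of that three-way decision, with the branch conditions reduced to two flags for illustration:

    # Three-way classification implied by the resource tracker messages above.
    # Simplified: the real check inspects the tracked-instance map and the
    # instance's state, not two booleans.
    def classify_allocation(instance_uuid, tracked_here, awaiting_start):
        if tracked_here:
            return 'actively managed on this compute host; allocation kept'
        if awaiting_start:
            return 'scheduled to this host but not started yet; skipping heal'
        return ('not actively managed by this compute host but has allocations '
                'referencing it; skipping heal because we do not know what to do')

    print(classify_allocation('1bdb1577-cc35-4839-8992-ae3b4ab87eb2', True, False))
    print(classify_allocation('743a4a52-ce35-4ec1-9286-e0c470e87186', False, False))
    print(classify_allocation('48162423-a117-437e-b171-9a40c7c6f49b', False, True))
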
[ 753.021606] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 3e109fff-94bd-41a9-bc43-373143b7fda5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021773] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 753.021773] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 25957956-0d50-4b4f-8e5c-f55a1e182235 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021773] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 365b8207-f49b-4ee7-af6f-9d271eed2e38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021773] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ad0c405f-48c8-4726-8e95-eb83a6e158fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021902] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance bd242bac-cd36-4fff-9325-fc14d5ceb566 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.021902] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 24641406-5292-4497-b34f-9af0dcdc58d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.030234] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864353, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.079265] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864350, 'name': Rename_Task, 'duration_secs': 0.402911} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.079687] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.080055] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d61d13fa-902b-4028-a3d4-77e879595d77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.089207] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 753.089207] env[69982]: value = "task-3864354" [ 753.089207] env[69982]: _type = "Task" [ 753.089207] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.099102] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.179870] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864351, 'name': ReconfigVM_Task, 'duration_secs': 0.261215} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.180323] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767891', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'name': 'volume-f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3e109fff-94bd-41a9-bc43-373143b7fda5', 'attached_at': '', 'detached_at': '', 'volume_id': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8', 'serial': 'f3eebfb7-d3e8-424c-86f7-97bb01603ed8'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 753.183567] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 753.183567] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb09f1d-36d5-469d-9fef-5ed19169b615 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.191943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 753.191943] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e68963f-98c4-4fc8-8651-7c8e7ab9b105 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.277746] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 753.278144] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.278543] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore1] 3e109fff-94bd-41a9-bc43-373143b7fda5 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 753.278756] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c8c3926-73d5-44e7-bcad-eaf4daba95b2 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.290357] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Successfully created port: f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.294750] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 753.297678] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 753.297678] env[69982]: value = "task-3864356" [ 753.297678] env[69982]: _type = "Task" [ 753.297678] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.307283] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864352, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.313123] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864356, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.523939] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 48162423-a117-437e-b171-9a40c7c6f49b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.529824] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864353, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097755} completed successfully. 
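
The extend task that just completed was issued earlier as "Extending root virtual disk to 1048576", which is consistent with the m1.nano flavor's root_gb=1 expressed in KiB for the ExtendVirtualDisk call. A one-line check of that arithmetic:

    # 1 GiB root disk expressed in KiB matches the logged extend size.
    root_gb = 1
    requested_size_kb = root_gb * 1024 * 1024
    assert requested_size_kb == 1048576
    print(requested_size_kb)
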
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.530571] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 753.533066] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb3b2d8-523b-452b-858c-dbcf696807a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.557841] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 753.560186] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdff8038-41da-4e8d-bad2-b1becb250427 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.575955] env[69982]: DEBUG nova.network.neutron [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Updating instance_info_cache with network_info: [{"id": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "address": "fa:16:3e:bc:78:05", "network": {"id": "04b16fcf-a000-4ed9-8e67-917372cdb28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1626570086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb13b27f37d4ce4bfb6b1b6c0b74fdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de9b5d7-f6", "ovs_interfaceid": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.585452] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 753.585452] env[69982]: value = "task-3864357" [ 753.585452] env[69982]: _type = "Task" [ 753.585452] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.596820] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864357, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.604289] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864354, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.795018] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864352, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.812480] env[69982]: DEBUG oslo_vmware.api [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264186} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.812849] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.813073] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 753.813325] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.813622] env[69982]: INFO nova.compute.manager [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Took 2.33 seconds to destroy the instance on the hypervisor. [ 753.813953] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
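
The loopingcall entry above waits on _deallocate_network_with_retries, i.e. network deallocation wrapped in a retry helper. A simplified stand-in for that call-with-retries pattern, using a plain loop and a hypothetical flaky callable instead of oslo.service's looping-call machinery:

    # Call a function and retry a fixed number of times on failure, with a
    # delay between attempts. Sketch of the pattern only, not oslo.service.
    import time

    def call_with_retries(func, max_retries=3, delay=0.01):
        for attempt in range(1, max_retries + 1):
            try:
                return func()
            except Exception as exc:   # broad catch is fine for a sketch
                if attempt == max_retries:
                    raise
                print('attempt %d failed (%s), retrying' % (attempt, exc))
                time.sleep(delay)

    _state = {'calls': 0}
    def deallocate_network():   # hypothetical flaky callable
        _state['calls'] += 1
        if _state['calls'] < 2:
            raise RuntimeError('neutron temporarily unavailable')
        return 'deallocated'

    print(call_with_retries(deallocate_network))
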
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 753.814157] env[69982]: DEBUG nova.compute.manager [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 753.814268] env[69982]: DEBUG nova.network.neutron [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 753.998383] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 754.031868] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d73153ad-9258-4c3c-9699-b6364408d631 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.047372] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 754.047775] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.047857] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 754.048307] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.049444] env[69982]: DEBUG 
nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 754.049444] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 754.049444] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 754.049444] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 754.049444] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 754.049614] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 754.049614] env[69982]: DEBUG nova.virt.hardware [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 754.053333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875ec5a3-9ca8-4e41-be83-20030d9f0004 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.066019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d4a866-393a-411d-9353-20c79ed6ee6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.084826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Releasing lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.085193] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 
tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Instance network_info: |[{"id": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "address": "fa:16:3e:bc:78:05", "network": {"id": "04b16fcf-a000-4ed9-8e67-917372cdb28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1626570086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb13b27f37d4ce4bfb6b1b6c0b74fdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de9b5d7-f6", "ovs_interfaceid": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 754.085971] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:78:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd48f0ef6-34e5-44d4-8baf-4470ed96ce73', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6de9b5d7-f6ba-4f07-9293-e372e9466ce1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 754.095921] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Creating folder: Project (7fb13b27f37d4ce4bfb6b1b6c0b74fdc). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 754.100109] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87e18784-d7e3-4f6b-ba6d-3dcf7a63a1c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.118254] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864357, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.119570] env[69982]: DEBUG oslo_vmware.api [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864354, 'name': PowerOnVM_Task, 'duration_secs': 0.695034} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.119906] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.120136] env[69982]: INFO nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Took 10.13 seconds to spawn the instance on the hypervisor. [ 754.120322] env[69982]: DEBUG nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 754.121364] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c623dc-2636-41d3-9211-56ccc67bc097 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.127684] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Created folder: Project (7fb13b27f37d4ce4bfb6b1b6c0b74fdc) in parent group-v767796. [ 754.127930] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Creating folder: Instances. Parent ref: group-v767898. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 754.128500] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d963b40-4d27-45fa-a17d-1b8087ccb84c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.148035] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Created folder: Instances in parent group-v767898. [ 754.149727] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 754.149727] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 754.149727] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad410144-d8ec-4ffe-ac15-5e3047c16e30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.171767] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 754.171767] env[69982]: value = "task-3864360" [ 754.171767] env[69982]: _type = "Task" [ 754.171767] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.183584] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864360, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.296750] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864352, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.395134] env[69982]: DEBUG nova.compute.manager [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Received event network-changed-6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.395134] env[69982]: DEBUG nova.compute.manager [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Refreshing instance network info cache due to event network-changed-6de9b5d7-f6ba-4f07-9293-e372e9466ce1. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 754.395134] env[69982]: DEBUG oslo_concurrency.lockutils [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] Acquiring lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.395134] env[69982]: DEBUG oslo_concurrency.lockutils [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] Acquired lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.395134] env[69982]: DEBUG nova.network.neutron [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Refreshing network info cache for port 6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 754.460195] env[69982]: DEBUG nova.compute.manager [req-e2ff3019-27cb-4d42-903c-15383f46ec2a req-bd079e76-bf59-4701-8755-6c6e114ebbd9 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Received event network-vif-deleted-061b4fb2-6c43-46cc-87f5-b307884088b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.460195] env[69982]: INFO nova.compute.manager [req-e2ff3019-27cb-4d42-903c-15383f46ec2a req-bd079e76-bf59-4701-8755-6c6e114ebbd9 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Neutron deleted interface 061b4fb2-6c43-46cc-87f5-b307884088b1; detaching it from the instance and deleting it from the info cache [ 754.460195] env[69982]: DEBUG nova.network.neutron [req-e2ff3019-27cb-4d42-903c-15383f46ec2a req-bd079e76-bf59-4701-8755-6c6e114ebbd9 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.535268] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7af5a14d-f586-4746-9831-8be255581637 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 754.597901] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864357, 'name': ReconfigVM_Task, 'duration_secs': 0.570723} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.598134] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 754.600391] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58169d2f-b6d4-45f3-a6c4-54e004b7b795 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.609186] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 754.609186] env[69982]: value = "task-3864361" [ 754.609186] env[69982]: _type = "Task" [ 754.609186] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.627951] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864361, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.651502] env[69982]: INFO nova.compute.manager [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Took 38.59 seconds to build instance. [ 754.685332] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864360, 'name': CreateVM_Task, 'duration_secs': 0.504411} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.685523] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 754.686578] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.686743] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.687112] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 754.688876] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d2a97fe-eff7-4068-82b9-ba9248c27252 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.695744] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 754.695744] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52da8ecf-6237-21a0-1f4e-f5d85933b673" [ 754.695744] env[69982]: _type = "Task" [ 754.695744] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.707772] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52da8ecf-6237-21a0-1f4e-f5d85933b673, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.797551] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864352, 'name': CloneVM_Task, 'duration_secs': 1.952433} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.797853] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Created linked-clone VM from snapshot [ 754.798709] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8da968-8ad7-43d1-840d-97df9a3115b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.808835] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Uploading image a87af1d8-1cc3-4b33-80f7-7a0661bcd1dd {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 754.851583] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 754.851583] env[69982]: value = "vm-767897" [ 754.851583] env[69982]: _type = "VirtualMachine" [ 754.851583] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 754.851583] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0e25af41-f188-4780-a5d2-5a5ab55e651f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.860747] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease: (returnval){ [ 754.860747] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5230c3e7-f166-0d8d-1dd8-4a930dd96068" [ 754.860747] env[69982]: _type = "HttpNfcLease" [ 754.860747] env[69982]: } obtained for exporting VM: (result){ [ 754.860747] env[69982]: value = "vm-767897" [ 754.860747] env[69982]: _type = "VirtualMachine" [ 754.860747] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 754.861054] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the lease: (returnval){ [ 754.861054] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5230c3e7-f166-0d8d-1dd8-4a930dd96068" [ 754.861054] env[69982]: _type = "HttpNfcLease" [ 754.861054] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 754.869576] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 754.869576] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5230c3e7-f166-0d8d-1dd8-4a930dd96068" [ 754.869576] env[69982]: _type = "HttpNfcLease" [ 754.869576] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 754.901397] env[69982]: DEBUG nova.network.neutron [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.963121] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df162eb0-1b14-4500-b5ac-2c1aa4ca064f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.978722] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80863839-e896-4e98-90c5-ad11cf9ea265 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.014046] env[69982]: DEBUG nova.compute.manager [req-e2ff3019-27cb-4d42-903c-15383f46ec2a req-bd079e76-bf59-4701-8755-6c6e114ebbd9 service nova] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Detach interface failed, port_id=061b4fb2-6c43-46cc-87f5-b307884088b1, reason: Instance 3e109fff-94bd-41a9-bc43-373143b7fda5 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 755.043162] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.120027] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864361, 'name': Rename_Task, 'duration_secs': 0.168229} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.120379] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.120638] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0f53fd4-3bcb-47a5-b27a-d9c98034e8c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.129614] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 755.129614] env[69982]: value = "task-3864363" [ 755.129614] env[69982]: _type = "Task" [ 755.129614] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.150959] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864363, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.155648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-685183c9-1e95-4f12-bde5-a2bf306db39a tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.418s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.211751] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52da8ecf-6237-21a0-1f4e-f5d85933b673, 'name': SearchDatastore_Task, 'duration_secs': 0.013057} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.212565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.212565] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 755.212705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 755.212778] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 755.212958] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.213265] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e26d9c6-251b-46c5-a6de-d46d7b95d66c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.226045] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.226045] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 755.226045] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e9a919b-4471-46bd-bd00-245df29141bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.233853] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 755.233853] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c95b05-edd1-8839-8f17-de723f3bd7fd" [ 755.233853] env[69982]: _type = "Task" [ 755.233853] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.245210] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c95b05-edd1-8839-8f17-de723f3bd7fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.372803] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 755.372803] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5230c3e7-f166-0d8d-1dd8-4a930dd96068" [ 755.372803] env[69982]: _type = "HttpNfcLease" [ 755.372803] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 755.374302] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 755.374302] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5230c3e7-f166-0d8d-1dd8-4a930dd96068" [ 755.374302] env[69982]: _type = "HttpNfcLease" [ 755.374302] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 755.374553] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b08906-e703-422f-b8bc-552c992f4a27 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.380623] env[69982]: DEBUG nova.network.neutron [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Updated VIF entry in instance network info cache for port 6de9b5d7-f6ba-4f07-9293-e372e9466ce1. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 755.380623] env[69982]: DEBUG nova.network.neutron [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Updating instance_info_cache with network_info: [{"id": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "address": "fa:16:3e:bc:78:05", "network": {"id": "04b16fcf-a000-4ed9-8e67-917372cdb28e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1626570086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb13b27f37d4ce4bfb6b1b6c0b74fdc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d48f0ef6-34e5-44d4-8baf-4470ed96ce73", "external-id": "nsx-vlan-transportzone-316", "segmentation_id": 316, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6de9b5d7-f6", "ovs_interfaceid": "6de9b5d7-f6ba-4f07-9293-e372e9466ce1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.393688] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 755.393688] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 755.455150] env[69982]: INFO nova.compute.manager [-] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Took 1.64 seconds to deallocate network for instance. 
[ 755.489767] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9d06afc3-e467-4a2e-9479-6fb1f5c7247b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.499750] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Successfully updated port: f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.549596] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8b812422-4ca6-4d2b-b6af-873fdb21fab6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 755.652261] env[69982]: DEBUG oslo_vmware.api [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864363, 'name': PowerOnVM_Task, 'duration_secs': 0.474881} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.652261] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 755.652261] env[69982]: DEBUG nova.compute.manager [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 755.652778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e2fc7a-3e70-4c7d-aa3f-68bcb9617c01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.662255] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.748936] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c95b05-edd1-8839-8f17-de723f3bd7fd, 'name': SearchDatastore_Task, 'duration_secs': 0.016383} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.750549] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3451610c-da23-4617-8aa5-5f0c0d7238b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.757820] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 755.757820] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ecf3ad-c623-086b-7ec4-cb818ec5af26" [ 755.757820] env[69982]: _type = "Task" [ 755.757820] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.768188] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ecf3ad-c623-086b-7ec4-cb818ec5af26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.780560] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.780859] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.884376] env[69982]: DEBUG oslo_concurrency.lockutils [req-b472adf5-9518-49da-91fe-f467410a3511 req-409c0156-1ae9-4fe5-aef9-27d4d0f07713 service nova] Releasing lock "refresh_cache-bd242bac-cd36-4fff-9325-fc14d5ceb566" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.001100] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.001525] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.005026] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 
tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 756.049042] env[69982]: INFO nova.compute.manager [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Took 0.59 seconds to detach 1 volumes for instance. [ 756.054987] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.189835] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.206994] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.272438] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ecf3ad-c623-086b-7ec4-cb818ec5af26, 'name': SearchDatastore_Task, 'duration_secs': 0.01196} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.273424] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.274216] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] bd242bac-cd36-4fff-9325-fc14d5ceb566/bd242bac-cd36-4fff-9325-fc14d5ceb566.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.274216] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-beb661df-acff-4b79-b3e5-ba48d37239df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.284329] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 756.284329] env[69982]: value = "task-3864364" [ 756.284329] env[69982]: _type = "Task" [ 756.284329] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.295288] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864364, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.555560] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 756.563464] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d5c23433-a0f3-4f0a-9c62-051d07dcd712 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 756.566341] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.768930] env[69982]: DEBUG nova.compute.manager [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received event network-vif-plugged-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.770102] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.770102] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.772087] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.772626] env[69982]: DEBUG nova.compute.manager [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] No waiting events found dispatching network-vif-plugged-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.772626] env[69982]: WARNING nova.compute.manager [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received unexpected event network-vif-plugged-f53bc105-863e-4018-a52b-e0115c017916 for instance with vm_state building and task_state spawning. [ 756.772802] env[69982]: DEBUG nova.compute.manager [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received event network-changed-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.773099] env[69982]: DEBUG nova.compute.manager [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing instance network info cache due to event network-changed-f53bc105-863e-4018-a52b-e0115c017916. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 756.773302] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Acquiring lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.800024] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864364, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.880829] env[69982]: DEBUG nova.network.neutron [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [{"id": "f53bc105-863e-4018-a52b-e0115c017916", "address": "fa:16:3e:6c:bd:6c", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf53bc105-86", "ovs_interfaceid": "f53bc105-863e-4018-a52b-e0115c017916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.891890] env[69982]: DEBUG nova.compute.manager [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Received event network-changed-0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.891890] env[69982]: DEBUG nova.compute.manager [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Refreshing instance network info cache due to event network-changed-0fd7d67c-3b77-4d0b-ad20-76668f828c54. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 756.891890] env[69982]: DEBUG oslo_concurrency.lockutils [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] Acquiring lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.891890] env[69982]: DEBUG oslo_concurrency.lockutils [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] Acquired lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.891890] env[69982]: DEBUG nova.network.neutron [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Refreshing network info cache for port 0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 757.033273] env[69982]: INFO nova.compute.manager [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Rebuilding instance [ 757.066657] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9123b08c-d2ec-4c4d-bade-0acdae75640a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.095452] env[69982]: DEBUG nova.compute.manager [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.096479] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d295a6-0bd5-40be-b800-b75cc65bbedb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.296189] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864364, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584459} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.296487] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] bd242bac-cd36-4fff-9325-fc14d5ceb566/bd242bac-cd36-4fff-9325-fc14d5ceb566.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.296703] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.297099] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-074eb651-a0d8-48e3-a7b2-9b404d4dae96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.305452] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 757.305452] env[69982]: value = "task-3864365" [ 757.305452] env[69982]: _type = "Task" [ 757.305452] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.319358] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864365, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.384396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.384956] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance network_info: |[{"id": "f53bc105-863e-4018-a52b-e0115c017916", "address": "fa:16:3e:6c:bd:6c", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf53bc105-86", "ovs_interfaceid": "f53bc105-863e-4018-a52b-e0115c017916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 757.385608] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Acquired lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.385608] env[69982]: DEBUG nova.network.neutron [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing network info cache for port f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 757.387039] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:bd:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7150f662-0cf1-44f9-ae14-d70f479649b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f53bc105-863e-4018-a52b-e0115c017916', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 757.396319] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 
tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.399665] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 757.400335] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-899d3888-bf7f-42be-9cf3-3787b6567dfd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.430241] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.430241] env[69982]: value = "task-3864366" [ 757.430241] env[69982]: _type = "Task" [ 757.430241] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.443441] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864366, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.572963] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d2684194-a688-4466-9852-1f4ff656f057 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 757.729199] env[69982]: DEBUG nova.network.neutron [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updated VIF entry in instance network info cache for port 0fd7d67c-3b77-4d0b-ad20-76668f828c54. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 757.731637] env[69982]: DEBUG nova.network.neutron [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updating instance_info_cache with network_info: [{"id": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "address": "fa:16:3e:b0:f1:37", "network": {"id": "e8b0aeb2-7651-4e24-a214-20300cf68cbf", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-467279852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9a8d7388f954868bf53433760a092c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fd7d67c-3b", "ovs_interfaceid": "0fd7d67c-3b77-4d0b-ad20-76668f828c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.816829] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864365, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078984} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.817183] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 757.818142] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98089419-9477-4f8a-9f8e-6b01d6ea0e71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.844289] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] bd242bac-cd36-4fff-9325-fc14d5ceb566/bd242bac-cd36-4fff-9325-fc14d5ceb566.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 757.844725] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a03f7677-fb8b-4f1f-9b13-bfc62ac55332 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.866167] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 757.866167] env[69982]: value = "task-3864367" [ 757.866167] env[69982]: _type = "Task" [ 757.866167] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.876050] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.947147] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864366, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.079521] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance fae97132-44b4-4df1-bd34-ba694ea7016a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 758.079856] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 758.080053] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 758.118682] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 758.119105] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d83f7e84-ab4a-43a3-9422-60b648a1f2a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.127493] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 758.127493] env[69982]: value = "task-3864368" [ 758.127493] env[69982]: _type = "Task" [ 758.127493] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.153899] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.233367] env[69982]: DEBUG oslo_concurrency.lockutils [req-a470da4f-fcc7-48b1-84d5-4c678eaad7e3 req-f0d92920-d649-4916-b168-68c1f8004c96 service nova] Releasing lock "refresh_cache-ad0c405f-48c8-4726-8e95-eb83a6e158fe" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.382568] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864367, 'name': ReconfigVM_Task, 'duration_secs': 0.469471} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.383140] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Reconfigured VM instance instance-00000021 to attach disk [datastore2] bd242bac-cd36-4fff-9325-fc14d5ceb566/bd242bac-cd36-4fff-9325-fc14d5ceb566.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.384044] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffbe35d4-ccef-41df-84d7-cdb4ff6df94e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.396968] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 758.396968] env[69982]: value = "task-3864369" [ 758.396968] env[69982]: _type = "Task" [ 758.396968] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.413187] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864369, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.420581] env[69982]: DEBUG nova.network.neutron [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updated VIF entry in instance network info cache for port f53bc105-863e-4018-a52b-e0115c017916. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 758.421051] env[69982]: DEBUG nova.network.neutron [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [{"id": "f53bc105-863e-4018-a52b-e0115c017916", "address": "fa:16:3e:6c:bd:6c", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf53bc105-86", "ovs_interfaceid": "f53bc105-863e-4018-a52b-e0115c017916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.453933] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864366, 'name': CreateVM_Task, 'duration_secs': 0.523302} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.457320] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 758.458893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.459363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.459790] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 758.460311] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f2c5ef-a808-4691-93b2-521d9d0ab2a6 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.473334] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.473739] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.479018] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 758.479018] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aadd72-5650-a2db-ef24-d285f6fea996" [ 758.479018] env[69982]: _type = "Task" [ 758.479018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.488691] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aadd72-5650-a2db-ef24-d285f6fea996, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.647178] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864368, 'name': PowerOffVM_Task, 'duration_secs': 0.152335} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.648714] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 758.649039] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 758.650122] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2299306d-9966-423e-b77f-f56087f1d76a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.658241] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 758.658528] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-369dd467-ec40-4117-9786-2a1874ee8601 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.686421] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 758.686653] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 758.686893] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Deleting the datastore file [datastore2] 365b8207-f49b-4ee7-af6f-9d271eed2e38 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 758.687188] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32ba804e-2e1a-4f1c-95bf-8331418a2d81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.695781] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 758.695781] env[69982]: value = "task-3864371" [ 758.695781] env[69982]: _type = "Task" [ 758.695781] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.704913] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864371, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.720307] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca412369-9d9a-447f-b8ff-c9ef9034e10b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.728804] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914c986d-8dcd-44f3-b6ba-80d391bbea42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.762655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182a1021-dacb-4624-b12c-efd846a2bff0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.771870] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629ea26c-deb3-4fc3-ac5b-70513e9d9107 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.788758] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.909718] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864369, 'name': Rename_Task, 'duration_secs': 0.188349} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.911059] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 758.911059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c8f8bba-6ebe-4d67-ac66-e80492f51348 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.919034] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 758.919034] env[69982]: value = "task-3864372" [ 758.919034] env[69982]: _type = "Task" [ 758.919034] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.927727] env[69982]: DEBUG oslo_concurrency.lockutils [req-d568d681-27b9-43cd-afc0-a372a03370ab req-b1bb492c-fa80-4a7f-b94d-57e8266c8367 service nova] Releasing lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.928514] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.988328] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aadd72-5650-a2db-ef24-d285f6fea996, 'name': SearchDatastore_Task, 'duration_secs': 0.019163} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.988648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.988883] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 758.989156] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.989338] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.989498] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.989792] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ba16649b-8019-4987-99a0-8f86b4cd9691 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.000644] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.000916] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.001792] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb8467a0-5364-4b4a-b943-204de2f499e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.010255] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 759.010255] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296d774-dd7f-3be9-5c83-168c6e95df92" [ 759.010255] env[69982]: _type = "Task" [ 759.010255] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.021363] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296d774-dd7f-3be9-5c83-168c6e95df92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.207151] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864371, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201602} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.207441] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 759.207626] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 759.207888] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.293213] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.433276] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864372, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.521401] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296d774-dd7f-3be9-5c83-168c6e95df92, 'name': SearchDatastore_Task, 'duration_secs': 0.019255} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.525023] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99224604-5c50-47b8-8207-b710fb0c1674 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.529803] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 759.529803] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521a03a2-3020-0ae2-21bc-9e748c49635f" [ 759.529803] env[69982]: _type = "Task" [ 759.529803] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.543585] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521a03a2-3020-0ae2-21bc-9e748c49635f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.801027] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 759.801027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.843s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.801027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.862s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.802616] env[69982]: INFO nova.compute.claims [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.936614] env[69982]: DEBUG oslo_vmware.api [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864372, 'name': PowerOnVM_Task, 'duration_secs': 0.595121} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.937864] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 759.937864] env[69982]: INFO nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Took 8.70 seconds to spawn the instance on the hypervisor. 
[ 759.937981] env[69982]: DEBUG nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 759.938792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb02409b-0d21-4e9f-b754-27c3fa3c51c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.041183] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521a03a2-3020-0ae2-21bc-9e748c49635f, 'name': SearchDatastore_Task, 'duration_secs': 0.023485} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.042892] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.042892] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 24641406-5292-4497-b34f-9af0dcdc58d7/24641406-5292-4497-b34f-9af0dcdc58d7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.042892] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dc5ed3d-cf0a-4309-989a-1dd870890640 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.053392] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 760.053392] env[69982]: value = "task-3864373" [ 760.053392] env[69982]: _type = "Task" [ 760.053392] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.062906] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864373, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.255166] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 760.255442] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 760.255598] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 760.255789] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 760.255923] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 760.256129] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 760.256376] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 760.256542] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 760.256710] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b 
tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 760.256883] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 760.257122] env[69982]: DEBUG nova.virt.hardware [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 760.258039] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e291078d-abad-4fc2-b24c-e359a089cef2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.267832] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a036508-163e-459d-97b6-851b0b2f7ce1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.283589] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 760.289465] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 760.289719] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 760.289962] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2159292e-f1ef-46c3-a7d0-98eebd43c2b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.306961] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 760.306961] env[69982]: value = "task-3864374" [ 760.306961] env[69982]: _type = "Task" [ 760.306961] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.318662] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864374, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.463916] env[69982]: INFO nova.compute.manager [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Took 40.27 seconds to build instance. 
[ 760.565611] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864373, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.824237] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864374, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.972950] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcee5e35-a364-4e2c-a1c4-79f39465a155 tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 57.114s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.067627] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742843} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.067940] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 24641406-5292-4497-b34f-9af0dcdc58d7/24641406-5292-4497-b34f-9af0dcdc58d7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.068181] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.068447] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2649a44-07e2-4561-9cb3-b25517b23831 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.076598] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 761.076598] env[69982]: value = "task-3864375" [ 761.076598] env[69982]: _type = "Task" [ 761.076598] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.086450] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.245544] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.245853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.324863] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864374, 'name': CreateVM_Task, 'duration_secs': 0.972396} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.325700] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 761.326227] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.326396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.326716] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 761.327994] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89121648-5012-4192-9d00-ba1efce56c26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.332816] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 761.332816] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd52c-e8fb-ecae-5b97-46ef477ec9b0" [ 761.332816] env[69982]: _type = "Task" [ 761.332816] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.344752] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd52c-e8fb-ecae-5b97-46ef477ec9b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.412920] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ef54b8-b5cd-48c6-b685-6ca59fd61632 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.426395] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8cc912-76f0-4385-ad1a-68d55e2170d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.462658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.462658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.462658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.462658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.462850] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.464402] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecf1bf8-ce14-4d16-9140-ef5b77c45881 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.467709] env[69982]: INFO nova.compute.manager [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Terminating instance [ 761.475916] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d56583-387b-48dc-a667-f5c2cd812f2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.480584] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 761.495155] env[69982]: DEBUG nova.compute.provider_tree [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.587629] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153459} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.588061] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.588763] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c10fc6-ea7d-4128-a631-c49c6ecd6a30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.616893] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 24641406-5292-4497-b34f-9af0dcdc58d7/24641406-5292-4497-b34f-9af0dcdc58d7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.617293] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6c77b80-73b9-41b0-bf7b-197e9412adea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.642596] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 761.642596] env[69982]: value = "task-3864376" [ 761.642596] env[69982]: _type = "Task" [ 761.642596] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.654563] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864376, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.844021] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526dd52c-e8fb-ecae-5b97-46ef477ec9b0, 'name': SearchDatastore_Task, 'duration_secs': 0.028966} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.844391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.844645] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 761.844954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.845135] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.845284] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.845562] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-785557dd-7919-4d6c-b4b6-77a54cfc8531 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.855694] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.856015] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 761.857041] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c3b2ed-7273-4886-912b-a24ce2999d50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.864451] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 761.864451] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e6ddf-3b54-eff6-dda5-351c20a0eb7b" [ 761.864451] env[69982]: _type = "Task" [ 761.864451] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.874651] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e6ddf-3b54-eff6-dda5-351c20a0eb7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.925680] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "67f59d53-c61b-48ad-b55d-710595e9dae3" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.926098] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.926439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.926731] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.927019] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s
{{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.930031] env[69982]: INFO nova.compute.manager [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Terminating instance [ 761.972988] env[69982]: DEBUG nova.compute.manager [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.973344] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.975061] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74e30fb-00ad-4e0d-b74d-f0d8220397b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.989504] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.989942] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87e4e805-f38e-4c6f-8cba-3567531ace12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.001162] env[69982]: DEBUG nova.scheduler.client.report [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 762.009671] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 762.009671] env[69982]: value = "task-3864377" [ 762.009671] env[69982]: _type = "Task" [ 762.009671] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.021881] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.026715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.157412] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.377455] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e6ddf-3b54-eff6-dda5-351c20a0eb7b, 'name': SearchDatastore_Task, 'duration_secs': 0.033352} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.378434] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bbcf92c-d902-4ac2-b674-660c732ce60f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.385478] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 762.385478] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a1e05-ea00-ca9b-0ed4-cde72b81fe65" [ 762.385478] env[69982]: _type = "Task" [ 762.385478] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.394054] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a1e05-ea00-ca9b-0ed4-cde72b81fe65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.434418] env[69982]: DEBUG nova.compute.manager [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 762.434571] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.434929] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c9e090e-a317-4e93-a686-9ec32726c53e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.443709] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 762.443709] env[69982]: value = "task-3864378" [ 762.443709] env[69982]: _type = "Task" [ 762.443709] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.454303] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864378, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.511428] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 762.512013] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 762.516073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.056s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.516736] env[69982]: INFO nova.compute.claims [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 762.530197] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864377, 'name': PowerOffVM_Task, 'duration_secs': 0.318486} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.530480] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.530633] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.531243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e727445-1e3d-4d49-a18e-f9b45bb60553 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.616969] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.617357] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.617522] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Deleting the datastore file [datastore2] bd242bac-cd36-4fff-9325-fc14d5ceb566 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.617855] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57cc02c5-d626-4a8c-a57f-e38075753dfb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.626384] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for the task: (returnval){ [ 762.626384] env[69982]: value = "task-3864380" [ 762.626384] env[69982]: _type = "Task" [ 762.626384] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.635610] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864380, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.654057] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864376, 'name': ReconfigVM_Task, 'duration_secs': 0.675603} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.654057] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 24641406-5292-4497-b34f-9af0dcdc58d7/24641406-5292-4497-b34f-9af0dcdc58d7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.654057] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72f32b5c-452f-42a5-873b-d05c90938d24 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.661603] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 762.661603] env[69982]: value = "task-3864381" [ 762.661603] env[69982]: _type = "Task" [ 762.661603] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.671109] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864381, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.897499] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a1e05-ea00-ca9b-0ed4-cde72b81fe65, 'name': SearchDatastore_Task, 'duration_secs': 0.009426} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.897851] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.898208] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.898549] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52c9e0c0-4004-4ac8-9112-fa70d8ad0228 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.906525] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 762.906525] env[69982]: value = "task-3864382" [ 762.906525] env[69982]: _type = "Task" [ 762.906525] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.916159] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864382, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.954549] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864378, 'name': PowerOffVM_Task, 'duration_secs': 0.196608} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.954825] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.955084] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 762.955296] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767837', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'name': 'volume-7296ca0e-ce02-4177-9218-289c56e21e9a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f59d53-c61b-48ad-b55d-710595e9dae3', 'attached_at': '', 'detached_at': '', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'serial': '7296ca0e-ce02-4177-9218-289c56e21e9a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 762.956776] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3493f93e-f4e2-4c81-98fd-ec993b392f76 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.978833] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7dc31ba-9856-46f1-95de-046b77433c7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.987486] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6510fd7-7ab6-4376-b8bb-32154f70a088 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.008041] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6563ac69-d81e-446d-ac28-b3d743f301ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.027317] env[69982]: DEBUG nova.compute.utils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.029553] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] The volume has not been displaced from its original location: [datastore1] volume-7296ca0e-ce02-4177-9218-289c56e21e9a/volume-7296ca0e-ce02-4177-9218-289c56e21e9a.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 763.034932] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Reconfiguring VM instance instance-00000018 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 763.035645] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 763.035825] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.037718] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75d469a4-72a4-4b85-bb23-ead241a7345e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.057594] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 763.057594] env[69982]: value = "task-3864383" [ 763.057594] env[69982]: _type = "Task" [ 763.057594] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.067350] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864383, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.090907] env[69982]: DEBUG nova.policy [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c9a3b4f9b694316851d3ea26f8bca6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '344ff7edee7c427bbbdb29e1a8c91a46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.136521] env[69982]: DEBUG oslo_vmware.api [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Task: {'id': task-3864380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16064} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.136781] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.136963] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.137168] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.137341] env[69982]: INFO nova.compute.manager [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Took 1.16 seconds to destroy the instance on the hypervisor. [ 763.137583] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.137796] env[69982]: DEBUG nova.compute.manager [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 763.137911] env[69982]: DEBUG nova.network.neutron [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 763.173740] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864381, 'name': Rename_Task, 'duration_secs': 0.15758} completed successfully.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.174052] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.174318] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1b5e980-38b6-4435-b8cd-942206325626 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.182183] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 763.182183] env[69982]: value = "task-3864384" [ 763.182183] env[69982]: _type = "Task" [ 763.182183] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.191179] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.419261] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864382, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.495462] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Successfully created port: 29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.507350] env[69982]: DEBUG nova.compute.manager [req-4e83d436-d76b-4d69-a363-762db8b58fff req-3163d562-aa4c-4883-a49b-73c931d404d8 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Received event network-vif-deleted-6de9b5d7-f6ba-4f07-9293-e372e9466ce1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.507350] env[69982]: INFO nova.compute.manager [req-4e83d436-d76b-4d69-a363-762db8b58fff req-3163d562-aa4c-4883-a49b-73c931d404d8 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Neutron deleted interface 6de9b5d7-f6ba-4f07-9293-e372e9466ce1; detaching it from the instance and deleting it from the info cache [ 763.507350] env[69982]: DEBUG nova.network.neutron [req-4e83d436-d76b-4d69-a363-762db8b58fff req-3163d562-aa4c-4883-a49b-73c931d404d8 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.539870] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 763.576113] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864383, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.693484] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864384, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.919107] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864382, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.957591] env[69982]: DEBUG nova.network.neutron [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.009426] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e897618d-9e97-4966-83ba-aaed7bb77f9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.026237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1443ba-6c28-4ea6-a9b6-ab6c31141a77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.070270] env[69982]: DEBUG nova.compute.manager [req-4e83d436-d76b-4d69-a363-762db8b58fff req-3163d562-aa4c-4883-a49b-73c931d404d8 service nova] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Detach interface failed, port_id=6de9b5d7-f6ba-4f07-9293-e372e9466ce1, reason: Instance bd242bac-cd36-4fff-9325-fc14d5ceb566 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 764.085233] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864383, 'name': ReconfigVM_Task, 'duration_secs': 0.946576} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.087652] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 764.088595] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Reconfigured VM instance instance-00000018 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 764.095166] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207d6806-ad04-4624-8a4b-ffd5fcab5115 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.101542] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f9944d1-6dcc-4683-aee8-31accafed2fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.118270] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk is in state: ready. 
{{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 764.118461] env[69982]: ERROR oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk due to incomplete transfer. [ 764.120503] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-7a1ba296-29d8-48b5-91e0-088b1c22dc39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.122995] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 764.122995] env[69982]: value = "task-3864385" [ 764.122995] env[69982]: _type = "Task" [ 764.122995] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.133454] env[69982]: DEBUG oslo_vmware.rw_handles [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52c964a7-c60c-a90e-d5cc-b0be29a29178/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 764.133693] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Uploaded image a87af1d8-1cc3-4b33-80f7-7a0661bcd1dd to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 764.136442] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 764.140031] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ef338bf8-9d5a-4dbf-9655-babe0651cb59 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.142036] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864385, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.152497] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 764.152497] env[69982]: value = "task-3864386" [ 764.152497] env[69982]: _type = "Task" [ 764.152497] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.166687] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864386, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.177168] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b145e9e-cc89-4b98-86b0-4c70f45546d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.189415] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4332a1e9-ca8b-4ec6-8f36-78048ce6c8f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.199353] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864384, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.227277] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6bb353-3bb7-43d7-9abc-32816726cb4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.236501] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f987789-4e40-416a-bd1d-2f8fa9e59936 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.252716] env[69982]: DEBUG nova.compute.provider_tree [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.420910] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864382, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.395998} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.420910] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 764.420910] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 764.420910] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5298b687-5168-4471-bbba-4dcd3d895036 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.427981] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 764.427981] env[69982]: value = "task-3864387" [ 764.427981] env[69982]: _type = "Task" [ 764.427981] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.439090] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864387, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.461765] env[69982]: INFO nova.compute.manager [-] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Took 1.32 seconds to deallocate network for instance. [ 764.551338] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 764.578343] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 764.578629] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.578897] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 764.579148] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.579338] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 764.579518] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 764.579773] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 764.580013] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 764.580281] env[69982]: DEBUG nova.virt.hardware [None
req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 764.580440] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 764.580906] env[69982]: DEBUG nova.virt.hardware [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 764.581998] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7461057-6b97-4351-b2ec-85dccc3e8e86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.590627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6e4e2c-e3d5-4a24-b0f7-7463b6a98425 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.632688] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864385, 'name': ReconfigVM_Task, 'duration_secs': 0.246265} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.633111] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767837', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'name': 'volume-7296ca0e-ce02-4177-9218-289c56e21e9a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '67f59d53-c61b-48ad-b55d-710595e9dae3', 'attached_at': '', 'detached_at': '', 'volume_id': '7296ca0e-ce02-4177-9218-289c56e21e9a', 'serial': '7296ca0e-ce02-4177-9218-289c56e21e9a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 764.633345] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 764.634251] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a616d514-7be1-4eb5-8c3d-164584d42edc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.643810] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 764.645426] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fca9b142-e76f-44fd-8309-b1772a5a763d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.664717] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864386, 'name': Destroy_Task, 'duration_secs': 0.405222} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.665136] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Destroyed the VM [ 764.665398] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 764.665658] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b3a09b97-6d9a-40ef-9dea-2be8e1a907d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.672988] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 764.672988] env[69982]: value = "task-3864389" [ 764.672988] env[69982]: _type = "Task" [ 764.672988] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.681648] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864389, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.690829] env[69982]: DEBUG oslo_vmware.api [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864384, 'name': PowerOnVM_Task, 'duration_secs': 1.382798} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.691099] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.691301] env[69982]: INFO nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 10.69 seconds to spawn the instance on the hypervisor. 
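Editor's note: the repeated "Waiting for the task: (returnval){...}" / "Task: {'id': ..., 'name': ...} progress is N%." / "completed successfully" triplets throughout this trace come from oslo.vmware's task polling loop (wait_for_task delegating to _poll_task). The sketch below illustrates only the general poll-until-terminal pattern using the standard library; FakeTask, the 0.5 s interval, and the log wording are illustrative assumptions, not oslo.vmware's actual API.

import logging
import time

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(name)s %(message)s")
LOG = logging.getLogger("task_poller")


class FakeTask:
    """Stand-in for a vCenter task handle; progresses 0 -> 100 over a few polls."""

    def __init__(self, task_id, name):
        self.id = task_id
        self.name = name
        self._progress = 0

    def poll(self):
        """Return (state, progress); a real client would query vCenter instead."""
        self._progress = min(self._progress + 40, 100)
        state = "success" if self._progress == 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5):
    """Poll until the task reaches a terminal state, logging progress as in the trace above."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "running":
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      task.id, task.name, progress)
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} "
                      "completed successfully.", task.id, task.name, duration)
            return
        raise RuntimeError(f"Task {task.id} ended in state {state}")


if __name__ == "__main__":
    wait_for_task(FakeTask("task-0000001", "PowerOnVM_Task"))

The duration_secs values seen above (e.g. PowerOnVM_Task at 1.382798 s) are produced by exactly this kind of wall-clock measurement around the polling loop.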
[ 764.691482] env[69982]: DEBUG nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.692295] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1877c2bd-76ea-40e6-ba58-f895cb8f47b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.713315] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.713548] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.713728] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Deleting the datastore file [datastore1] 67f59d53-c61b-48ad-b55d-710595e9dae3 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.714034] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79d25586-39da-4dda-ad20-8e4bca2414b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.722091] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for the task: (returnval){ [ 764.722091] env[69982]: value = "task-3864390" [ 764.722091] env[69982]: _type = "Task" [ 764.722091] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.730907] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864390, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.756122] env[69982]: DEBUG nova.scheduler.client.report [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.942020] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864387, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10472} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.942020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 764.942603] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac4c2f6-2b95-4e25-8023-31763ae005b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.967393] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 764.968202] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66a510cf-7321-4be2-9b1b-2b6e60b307a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.984218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.992709] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 764.992709] env[69982]: value = "task-3864391" [ 764.992709] env[69982]: _type = "Task" [ 764.992709] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.002556] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864391, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.101918] env[69982]: DEBUG nova.compute.manager [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received event network-vif-plugged-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.102173] env[69982]: DEBUG oslo_concurrency.lockutils [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] Acquiring lock "48162423-a117-437e-b171-9a40c7c6f49b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.102389] env[69982]: DEBUG oslo_concurrency.lockutils [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] Lock "48162423-a117-437e-b171-9a40c7c6f49b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.102585] env[69982]: DEBUG oslo_concurrency.lockutils [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] Lock "48162423-a117-437e-b171-9a40c7c6f49b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.102785] env[69982]: DEBUG nova.compute.manager [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] No waiting events found dispatching network-vif-plugged-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 765.102967] env[69982]: WARNING nova.compute.manager [req-4d982d65-f304-4d73-9e64-a5b0e2531ea5 req-98c5d3d4-1198-4a68-8e41-1cf8b9132c3a service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received unexpected event network-vif-plugged-29f8b8ab-a86a-44eb-86e6-495ca7006221 for instance with vm_state building and task_state spawning. [ 765.185017] env[69982]: DEBUG oslo_vmware.api [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864389, 'name': RemoveSnapshot_Task, 'duration_secs': 0.381773} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.185298] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 765.185521] env[69982]: INFO nova.compute.manager [None req-a8ea274e-3776-472e-a98d-632fc04bab3a tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Took 14.51 seconds to snapshot the instance on the hypervisor. [ 765.206267] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Successfully updated port: 29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.216113] env[69982]: INFO nova.compute.manager [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 42.36 seconds to build instance. [ 765.235214] env[69982]: DEBUG oslo_vmware.api [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Task: {'id': task-3864390, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283769} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.235504] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.235694] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.235914] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.236102] env[69982]: INFO nova.compute.manager [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Took 2.80 seconds to destroy the instance on the hypervisor. 
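Editor's note: the 'Acquiring lock "..." by "..."' / 'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' lines that surround the destroy and claim paths here are emitted by oslo.concurrency's lockutils wrapper around process-local named locks. A simplified sketch of that accounting, built on threading.Lock and a context manager; the named_lock helper and its exact log wording are illustrative assumptions, not the oslo.concurrency source.

import contextlib
import logging
import threading
import time

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(name)s %(message)s")
LOG = logging.getLogger("lock_demo")

_LOCKS = {}                       # name -> threading.Lock
_LOCKS_GUARD = threading.Lock()   # protects the registry itself


@contextlib.contextmanager
def named_lock(name, caller):
    """Serialize callers on a named lock, logging wait and hold times."""
    with _LOCKS_GUARD:
        lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, t1 - t0)
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, caller, time.monotonic() - t1)


if __name__ == "__main__":
    with named_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)   # stand-in for the claim bookkeeping done under the lock

The long waited values in the trace (e.g. 31.526 s for the compute_resources lock) reflect contention on a single such named lock shared by every resource-tracker operation on the host.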
[ 765.236356] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.236540] env[69982]: DEBUG nova.compute.manager [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.236635] env[69982]: DEBUG nova.network.neutron [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 765.261565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.262170] env[69982]: DEBUG nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.265139] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.387s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.265342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.267369] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.809s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.269110] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.269350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 
tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.526s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.271068] env[69982]: INFO nova.compute.claims [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.296977] env[69982]: INFO nova.scheduler.client.report [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted allocations for instance 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b [ 765.300727] env[69982]: INFO nova.scheduler.client.report [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Deleted allocations for instance 303c7ee1-8d26-460b-aab9-d55c71cf8a73 [ 765.506805] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864391, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.714134] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.714290] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.714475] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 765.718942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-67c4da56-596d-4e61-86f7-320c9c83a6a4 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.735s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.776044] env[69982]: DEBUG nova.compute.utils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.778115] env[69982]: DEBUG nova.compute.manager [None 
req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 765.807958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f68660f1-1e29-4f7c-a3ad-885f4d30144d tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.105s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.809579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-faa58cab-1fce-404d-aab6-9ce762354452 tempest-ImagesOneServerTestJSON-1015784420 tempest-ImagesOneServerTestJSON-1015784420-project-member] Lock "303c7ee1-8d26-460b-aab9-d55c71cf8a73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.295s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.824582] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "25957956-0d50-4b4f-8e5c-f55a1e182235" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.824906] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.825173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.825385] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.825569] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.831585] env[69982]: INFO nova.compute.manager [None 
req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Terminating instance [ 766.002330] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864391, 'name': ReconfigVM_Task, 'duration_secs': 0.769269} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.002774] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 365b8207-f49b-4ee7-af6f-9d271eed2e38/365b8207-f49b-4ee7-af6f-9d271eed2e38.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.003286] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ea1825c-ca54-4843-8f33-cb275718e3c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.012968] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 766.012968] env[69982]: value = "task-3864392" [ 766.012968] env[69982]: _type = "Task" [ 766.012968] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.025068] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864392, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.222039] env[69982]: DEBUG nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 766.270101] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 766.281121] env[69982]: DEBUG nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.336699] env[69982]: DEBUG nova.compute.manager [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 766.336932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 766.341134] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de5e538-de10-4933-826f-68a1f3d6dd1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.356226] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 766.356573] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15e4beaa-80c1-44e7-a392-ad135a17d463 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.420404] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "c563267f-7699-4bd1-83cf-59ecef500ac3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.420651] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.431193] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 766.431458] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 766.431700] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore1] 25957956-0d50-4b4f-8e5c-f55a1e182235 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 766.433113] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54fb904b-3a33-46d4-a7e3-709a6c02d04c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.440049] env[69982]: 
DEBUG oslo_vmware.api [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 766.440049] env[69982]: value = "task-3864394" [ 766.440049] env[69982]: _type = "Task" [ 766.440049] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.454348] env[69982]: DEBUG oslo_vmware.api [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864394, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.508365] env[69982]: DEBUG nova.network.neutron [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.530636] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864392, 'name': Rename_Task, 'duration_secs': 0.340848} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.531604] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 766.531855] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad5eaac5-c6aa-4d0d-b419-f2d4469aa221 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.542589] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Waiting for the task: (returnval){ [ 766.542589] env[69982]: value = "task-3864395" [ 766.542589] env[69982]: _type = "Task" [ 766.542589] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.557739] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.643258] env[69982]: DEBUG nova.network.neutron [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.751335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.919389] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbf30be-305a-4198-9150-afd26a6b31bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.932391] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b513727-d06e-492f-a1aa-4bcf2f4ce2a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.972369] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c003d53-79c4-4651-8132-f8663185b147 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.979366] env[69982]: DEBUG oslo_vmware.api [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284691} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.982150] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 766.982347] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 766.982520] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 766.982684] env[69982]: INFO nova.compute.manager [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Took 0.65 seconds to destroy the instance on the hypervisor. [ 766.982910] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.983559] env[69982]: DEBUG nova.compute.manager [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 766.983652] env[69982]: DEBUG nova.network.neutron [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 766.989555] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100374d7-e5e8-4c1c-97d7-09aeb1a79335 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.005993] env[69982]: DEBUG nova.compute.provider_tree [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.017441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.017743] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] 
Instance network_info: |[{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 767.018422] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:eb:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6815237d-f565-474d-a3c0-9c675478eb00', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29f8b8ab-a86a-44eb-86e6-495ca7006221', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.026102] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.026848] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.027104] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6a4f3db-737b-4458-9817-0e719247077b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.059093] env[69982]: DEBUG oslo_vmware.api [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864395, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.059333] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.059333] env[69982]: value = "task-3864396" [ 767.059333] env[69982]: _type = "Task" [ 767.059333] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.068676] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864396, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.146365] env[69982]: INFO nova.compute.manager [-] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Took 1.91 seconds to deallocate network for instance. [ 767.299117] env[69982]: DEBUG nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.340775] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.341530] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.344035] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.344035] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.344035] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.344035] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.344035] env[69982]: DEBUG 
nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.344889] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.344889] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.344889] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.344889] env[69982]: DEBUG nova.virt.hardware [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.349020] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36d27c5-51d2-4a03-a348-45b84d1ca881 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.363715] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3018393-2f71-4068-a057-4d6d9f5cbd61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.383770] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 767.394106] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Creating folder: Project (9658c45205af40088787f30deeaf0700). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 767.395441] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d34e24d-e21d-4676-a10e-44d95e5630cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.411540] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Created folder: Project (9658c45205af40088787f30deeaf0700) in parent group-v767796. 
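Editor's note: just above, the driver creates a per-project folder and then an "Instances" subfolder in the vCenter inventory before issuing CreateVM_Task. The sketch below shows only the idempotent ensure-folder walk implied by those records; FakeFolderBackend is an illustrative dictionary-backed stand-in, not the vSphere SDK or the Nova driver code.

import logging

logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s %(message)s")
LOG = logging.getLogger("folder_demo")


class FakeFolderBackend:
    """Maps (parent_ref, child_name) -> child_ref, mimicking an inventory folder tree."""

    def __init__(self):
        self._children = {}
        self._next_id = 767900

    def find_child(self, parent_ref, name):
        return self._children.get((parent_ref, name))

    def create_folder(self, parent_ref, name):
        self._next_id += 1
        ref = f"group-v{self._next_id}"
        self._children[(parent_ref, name)] = ref
        LOG.info("Created folder: %s in parent %s.", name, parent_ref)
        return ref


def ensure_folder(backend, parent_ref, name):
    """Return the existing child folder, creating it only when absent (idempotent)."""
    ref = backend.find_child(parent_ref, name)
    if ref is not None:
        return ref
    return backend.create_folder(parent_ref, name)


if __name__ == "__main__":
    backend = FakeFolderBackend()
    project = ensure_folder(backend, "group-v767796",
                            "Project (9658c45205af40088787f30deeaf0700)")
    instances = ensure_folder(backend, project, "Instances")
    # A repeated call is a no-op thanks to the existence check.
    assert ensure_folder(backend, project, "Instances") == instances

This is why concurrent builds for the same project do not pile up duplicate folders: only the first caller creates each level, later callers reuse the returned reference.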
[ 767.412111] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Creating folder: Instances. Parent ref: group-v767904. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 767.412585] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3c6df74-896c-4245-b18b-c748816f536f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.430184] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Created folder: Instances in parent group-v767904. [ 767.430184] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 767.430184] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 767.430184] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c385130-8673-4892-9e3e-455b7bd2ea6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.453984] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 767.453984] env[69982]: value = "task-3864399" [ 767.453984] env[69982]: _type = "Task" [ 767.453984] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.467402] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864399, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.484250] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "bba73604-c54f-4643-9e4c-326b643b3d51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.486146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.486146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.486146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.486146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.489510] env[69982]: INFO nova.compute.manager [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Terminating instance [ 767.509443] env[69982]: DEBUG nova.scheduler.client.report [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.557844] env[69982]: DEBUG oslo_vmware.api [None 
req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Task: {'id': task-3864395, 'name': PowerOnVM_Task, 'duration_secs': 0.749044} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.557844] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.557844] env[69982]: DEBUG nova.compute.manager [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.558353] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7824a721-8822-4ad5-b218-e64046192537 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.576030] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864396, 'name': CreateVM_Task, 'duration_secs': 0.460759} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.576030] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.576030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.576030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.576475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.576475] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80fd4cf6-7eb3-4c6b-bdd2-2b76ca95cb6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.585320] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 767.585320] env[69982]: value = 
"session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52939cd5-45bb-7b59-31d6-38d2e80eae7c" [ 767.585320] env[69982]: _type = "Task" [ 767.585320] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.597570] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52939cd5-45bb-7b59-31d6-38d2e80eae7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.663599] env[69982]: DEBUG nova.compute.manager [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.663940] env[69982]: DEBUG nova.compute.manager [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing instance network info cache due to event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 767.664250] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Acquiring lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.664501] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Acquired lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.664576] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 767.734090] env[69982]: INFO nova.compute.manager [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Took 0.59 seconds to detach 1 volumes for instance. 
[ 767.737591] env[69982]: DEBUG nova.compute.manager [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Deleting volume: 7296ca0e-ce02-4177-9218-289c56e21e9a {{(pid=69982) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 767.870450] env[69982]: DEBUG nova.network.neutron [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.971323] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864399, 'name': CreateVM_Task, 'duration_secs': 0.355096} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.971610] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.972127] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.995331] env[69982]: DEBUG nova.compute.manager [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 767.998966] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.998966] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c91b0f0-762f-4637-aa34-f1107c015b97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.010432] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 768.010761] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ee1ad6f-995e-4aa6-b8fb-b40cc0cce353 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.015896] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.746s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.016564] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.021389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.681s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.021663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.023952] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.311s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.026779] env[69982]: INFO nova.compute.claims [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.029275] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 768.029275] env[69982]: value = "task-3864401" [ 768.029275] env[69982]: _type = "Task" [ 768.029275] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.042861] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864401, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.061163] env[69982]: INFO nova.scheduler.client.report [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Deleted allocations for instance 743a4a52-ce35-4ec1-9286-e0c470e87186 [ 768.087141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.099287] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52939cd5-45bb-7b59-31d6-38d2e80eae7c, 'name': SearchDatastore_Task, 'duration_secs': 0.013301} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.099622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.099923] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.100157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.100315] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.100673] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.100987] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.101330] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 768.101577] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c01708c-cd8b-48c5-a44f-826db270c101 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.106236] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1802abc-a9ef-46ac-b108-e803d5b9e1bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.113209] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 768.113209] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521b7774-5047-40a7-2591-dc9369809b77" [ 768.113209] env[69982]: _type = "Task" [ 768.113209] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.118297] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.118297] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.119228] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa7c15c9-3f72-4cca-9023-57a365869314 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.126445] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521b7774-5047-40a7-2591-dc9369809b77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.131985] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 768.131985] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bcfa1-335c-39c6-cfdd-5cdeb249016f" [ 768.131985] env[69982]: _type = "Task" [ 768.131985] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.140863] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bcfa1-335c-39c6-cfdd-5cdeb249016f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.302869] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.375072] env[69982]: INFO nova.compute.manager [-] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Took 1.39 seconds to deallocate network for instance. [ 768.533553] env[69982]: DEBUG nova.compute.utils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.538151] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.538151] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.552058] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864401, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.574733] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8af336c6-d307-4c3a-b61b-f9a930ef0135 tempest-AttachInterfacesUnderV243Test-1966607628 tempest-AttachInterfacesUnderV243Test-1966607628-project-member] Lock "743a4a52-ce35-4ec1-9286-e0c470e87186" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.230s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.620981] env[69982]: DEBUG nova.policy [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10cd7f1321e64d028034564b29d34521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47175f0d16ed47f1bce853c13287bdc9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.634453] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521b7774-5047-40a7-2591-dc9369809b77, 'name': SearchDatastore_Task, 'duration_secs': 0.013371} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.636178] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updated VIF entry in instance network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 768.636555] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.644970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.645302] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.645562] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.652702] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bcfa1-335c-39c6-cfdd-5cdeb249016f, 'name': SearchDatastore_Task, 'duration_secs': 0.039486} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.653563] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ef2b42a-8f53-4e78-b2e4-67cbbf479ad3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.665236] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 768.665236] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52695e70-3c00-4448-f048-b5b96b06f7fa" [ 768.665236] env[69982]: _type = "Task" [ 768.665236] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.673092] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52695e70-3c00-4448-f048-b5b96b06f7fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.883088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.036352] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.058597] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864401, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.146740] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Releasing lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.147077] env[69982]: DEBUG nova.compute.manager [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Received event network-vif-deleted-c81278f4-6f59-40a5-aada-68e9141c3415 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 769.147223] env[69982]: DEBUG nova.compute.manager [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 769.147389] env[69982]: DEBUG nova.compute.manager [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing instance network info cache due to event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 769.147622] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.147807] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.147938] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 769.178328] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52695e70-3c00-4448-f048-b5b96b06f7fa, 'name': SearchDatastore_Task, 'duration_secs': 0.0165} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.182056] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.183353] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 48162423-a117-437e-b171-9a40c7c6f49b/48162423-a117-437e-b171-9a40c7c6f49b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.183353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.183353] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 769.183353] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a95fc68c-e2b6-4902-9914-b9e537b70def {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.186028] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d238943-b9cc-4ced-ac37-bde4b95df6ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.201096] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 769.201096] env[69982]: value = "task-3864402" [ 769.201096] env[69982]: _type = "Task" [ 769.201096] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.202965] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 769.203179] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 769.208808] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7d04ff-119e-4f6e-904a-12477e5271e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.217173] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 769.217173] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bfbf3d-d6a2-b1ca-4b7e-89d1cf0eb4db" [ 769.217173] env[69982]: _type = "Task" [ 769.217173] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.224200] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864402, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.235860] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bfbf3d-d6a2-b1ca-4b7e-89d1cf0eb4db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.261940] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Successfully created port: add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.296661] env[69982]: DEBUG nova.compute.manager [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 769.296869] env[69982]: DEBUG nova.compute.manager [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing instance network info cache due to event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 769.297756] env[69982]: DEBUG oslo_concurrency.lockutils [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.554124] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864401, 'name': PowerOffVM_Task, 'duration_secs': 1.191317} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.554692] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 769.556351] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 769.556351] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ac41495-5e38-4545-83d5-c898eb7e95c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.628774] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 769.629250] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 769.629375] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleting the datastore file [datastore1] bba73604-c54f-4643-9e4c-326b643b3d51 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 769.629697] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82ae6cec-2b2c-4ef6-b7a8-c15a28951e23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.644253] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 769.644253] env[69982]: value = "task-3864404" [ 769.644253] env[69982]: _type = "Task" [ 769.644253] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.657795] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864404, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.723309] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864402, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.743744] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bfbf3d-d6a2-b1ca-4b7e-89d1cf0eb4db, 'name': SearchDatastore_Task, 'duration_secs': 0.024747} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.744709] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0cc2352-a4dc-4a1a-a4f6-df2e293bb284 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.755977] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 769.755977] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5299d29f-7138-ec9d-d6d2-626af3ed734c" [ 769.755977] env[69982]: _type = "Task" [ 769.755977] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.773184] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5299d29f-7138-ec9d-d6d2-626af3ed734c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.850273] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7d1610-2f5a-44ae-80b0-ad6187da2b50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.858903] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b68fbbe1-a72d-4fdf-9d45-3f0fdc909817 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.902805] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b27c96-450f-4010-a52e-bfc4795d8bbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.914238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b294d04-70cb-4b0b-b2eb-a29109d669d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.934904] env[69982]: DEBUG nova.compute.provider_tree [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.961553] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.961815] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.962170] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "365b8207-f49b-4ee7-af6f-9d271eed2e38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.962443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 769.962674] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 
tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.965846] env[69982]: INFO nova.compute.manager [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Terminating instance [ 770.056131] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.100232] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.100232] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.100232] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.100545] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.100762] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.101138] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.101380] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.101539] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.101708] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.101872] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.102064] env[69982]: DEBUG nova.virt.hardware [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.102952] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acbab3b-ec34-422e-b3f6-73f8052f6dbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.112216] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4002c32-5fd2-47ac-afc6-346119924b6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.118668] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updated VIF entry in instance network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 770.119168] env[69982]: DEBUG nova.network.neutron [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.156819] env[69982]: DEBUG oslo_vmware.api [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864404, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282167} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.157033] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.157235] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 770.157496] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.157684] env[69982]: INFO nova.compute.manager [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Took 2.16 seconds to destroy the instance on the hypervisor. 
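The "Acquiring lock ... do_terminate_instance" / "acquired ... :: waited" / "released ... :: held" entries above are emitted by oslo.concurrency's synchronized wrapper around the nested termination helper. Below is a minimal sketch of that locking pattern only, with a placeholder lock name and an empty body; Nova's real helper lives inside ComputeManager and performs the actual teardown while the per-instance lock is held.

```python
# Sketch only: the oslo.concurrency pattern behind the
# "Acquiring lock / acquired :: waited / released :: held" DEBUG lines.
# The lock name and function body are placeholders, not Nova's real code.
from oslo_concurrency import lockutils


@lockutils.synchronized('365b8207-f49b-4ee7-af6f-9d271eed2e38')
def do_terminate_instance():
    # Runs with the per-instance lock held, so concurrent delete requests
    # for the same instance serialize instead of racing.
    pass


# Calling the decorated function logs "Acquiring lock ..." at DEBUG, then
# "acquired ... :: waited N s" once the lock is taken, and
# "released ... :: held N s" when the function returns.
do_terminate_instance()
```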
[ 770.157934] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.158138] env[69982]: DEBUG nova.compute.manager [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 770.158235] env[69982]: DEBUG nova.network.neutron [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 770.217679] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64356} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.218075] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 48162423-a117-437e-b171-9a40c7c6f49b/48162423-a117-437e-b171-9a40c7c6f49b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.218299] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.218561] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b4ebb33c-b9bc-424e-b9cd-2d897ef08f2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.227371] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 770.227371] env[69982]: value = "task-3864405" [ 770.227371] env[69982]: _type = "Task" [ 770.227371] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.235696] env[69982]: DEBUG nova.compute.manager [req-8912a85e-2758-4934-92f6-86a3170d2c03 req-9a3c9e01-669d-4f5c-b5fd-a7df0869e848 service nova] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Received event network-vif-deleted-7c2fed4f-eaa2-49d4-9df7-62d75b592224 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.239790] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864405, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.270022] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5299d29f-7138-ec9d-d6d2-626af3ed734c, 'name': SearchDatastore_Task, 'duration_secs': 0.067803} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.270022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.270363] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 770.270473] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f885a41-8b77-4aee-a678-e0846030585f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.279259] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 770.279259] env[69982]: value = "task-3864406" [ 770.279259] env[69982]: _type = "Task" [ 770.279259] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.291055] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864406, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.441302] env[69982]: DEBUG nova.scheduler.client.report [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 770.472391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "refresh_cache-365b8207-f49b-4ee7-af6f-9d271eed2e38" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.472391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquired lock "refresh_cache-365b8207-f49b-4ee7-af6f-9d271eed2e38" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.472391] env[69982]: DEBUG nova.network.neutron [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 770.633505] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfe059c7-08c9-40d0-9be2-cab6e93ec0fd req-2e5250eb-f380-477a-9e30-7f6656609cb1 service nova] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.633964] env[69982]: DEBUG oslo_concurrency.lockutils [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.634202] env[69982]: DEBUG nova.network.neutron [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 770.741026] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864405, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079672} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.741026] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.741026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573a0536-5156-4dc6-ba29-58a970cbff80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.774587] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 48162423-a117-437e-b171-9a40c7c6f49b/48162423-a117-437e-b171-9a40c7c6f49b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.774918] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f39eed6b-8605-4c68-ad50-021e6ff61d7e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.801863] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864406, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.804252] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 770.804252] env[69982]: value = "task-3864407" [ 770.804252] env[69982]: _type = "Task" [ 770.804252] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.815556] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864407, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.947631] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.924s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.948181] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 770.955218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.969s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.955406] env[69982]: DEBUG nova.objects.instance [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 770.995926] env[69982]: DEBUG nova.network.neutron [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.012424] env[69982]: DEBUG nova.network.neutron [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 771.179774] env[69982]: DEBUG nova.network.neutron [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.305950] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864406, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.316410] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864407, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.352581] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Successfully updated port: add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.462594] env[69982]: DEBUG nova.compute.utils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 771.477016] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 771.477713] env[69982]: DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 771.499496] env[69982]: INFO nova.compute.manager [-] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Took 1.34 seconds to deallocate network for instance. [ 771.543518] env[69982]: DEBUG nova.policy [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08d17e3ac5f40c6890dc8dcc4c559d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efe64e69253d49a6a1146f240506ce39', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 771.685745] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Releasing lock "refresh_cache-365b8207-f49b-4ee7-af6f-9d271eed2e38" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.686213] env[69982]: DEBUG nova.compute.manager [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 771.686419] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.687336] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a425d08d-c465-473e-a8e9-898ea6254e36 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.697144] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.697543] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d05dd178-0973-4c82-af51-64989554a2b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.706210] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 771.706210] env[69982]: value = "task-3864408" [ 771.706210] env[69982]: _type = "Task" [ 771.706210] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.716428] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864408, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.803494] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864406, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.817788] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864407, 'name': ReconfigVM_Task, 'duration_secs': 0.614649} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.818357] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 48162423-a117-437e-b171-9a40c7c6f49b/48162423-a117-437e-b171-9a40c7c6f49b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.819179] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd725928-e793-4a60-a1be-e08ac2ffcc54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.829092] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 771.829092] env[69982]: value = "task-3864409" [ 771.829092] env[69982]: _type = "Task" [ 771.829092] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.843828] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864409, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.858115] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.858192] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquired lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.858542] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 771.861270] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received event network-changed-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 771.861464] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing instance network info cache due to event network-changed-f53bc105-863e-4018-a52b-e0115c017916. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 771.861668] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Acquiring lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.861805] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Acquired lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.862315] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing network info cache for port f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 771.977764] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 771.984024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ef5f6fea-831f-4cd9-951f-aaa9fbc53267 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.984634] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.771s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.985197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.992359] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.977s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.994275] env[69982]: INFO nova.compute.claims [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 772.006030] env[69982]: 
DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Successfully created port: c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.009506] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.029344] env[69982]: INFO nova.scheduler.client.report [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Deleted allocations for instance ebd9e006-a591-44f7-867c-041731b9d45a [ 772.098139] env[69982]: DEBUG nova.network.neutron [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updated VIF entry in instance network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 772.098139] env[69982]: DEBUG nova.network.neutron [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.223086] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864408, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.305076] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864406, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.971683} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.305434] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.305588] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.305884] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fc93346-f05b-4028-8321-e8b14011b905 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.313549] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 772.313549] env[69982]: value = "task-3864410" [ 772.313549] env[69982]: _type = "Task" [ 772.313549] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.325509] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864410, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.337576] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864409, 'name': Rename_Task, 'duration_secs': 0.157703} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.337865] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 772.339024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57f317ce-3f8c-420d-96b2-b7fb3465c03a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.349777] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 772.349777] env[69982]: value = "task-3864411" [ 772.349777] env[69982]: _type = "Task" [ 772.349777] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.370468] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864411, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.479273] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 772.540294] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0e46e2ef-6fdd-4da6-8ae4-433fb3753f6b tempest-ServerRescueTestJSONUnderV235-579572485 tempest-ServerRescueTestJSONUnderV235-579572485-project-member] Lock "ebd9e006-a591-44f7-867c-041731b9d45a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.449s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.599564] env[69982]: DEBUG oslo_concurrency.lockutils [req-fdd08af8-3b19-4461-b042-de96d1e6c302 req-2653f7e9-710e-4c45-ab51-a3abb63f2058 service nova] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.679257] env[69982]: DEBUG nova.compute.manager [req-c31f8b96-1e8b-41cf-9f0e-20411d95187a req-ad6a0af9-bf6c-4e09-af61-2bca64d6750c service nova] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Received event network-vif-deleted-f542e5e0-ebab-4967-94f1-8625e905193e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.716249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.716249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.716249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.716249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.716422] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.725968] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864408, 'name': PowerOffVM_Task, 'duration_secs': 0.752671} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.727328] env[69982]: INFO nova.compute.manager [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Terminating instance [ 772.732036] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 772.732036] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 772.732036] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ba5d730-bf31-4bfe-bc7a-8f52ba381ace {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.760874] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 772.761087] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 772.761302] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Deleting the datastore file [datastore1] 365b8207-f49b-4ee7-af6f-9d271eed2e38 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 772.761629] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48191111-32c8-4dc7-bbd3-2350d0d6d9ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.771699] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for the task: (returnval){ [ 772.771699] env[69982]: value = "task-3864413" [ 772.771699] env[69982]: _type = "Task" [ 772.771699] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.781653] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864413, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.828741] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083555} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.828890] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 772.829704] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38cb94d-8bde-450f-a5dd-a647d3ebd499 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.852784] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 772.853269] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c41a3aa2-fce2-47e3-b820-0ef49c5373ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.886881] env[69982]: DEBUG oslo_vmware.api [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864411, 'name': PowerOnVM_Task, 'duration_secs': 0.481293} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.888517] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 772.888784] env[69982]: INFO nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Took 8.34 seconds to spawn the instance on the hypervisor. 
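The recurring "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" sequences, including the PowerOnVM_Task that finished just above, are oslo.vmware's invoke-and-poll cycle around a vCenter task. The sketch below shows that cycle in isolation, assuming placeholder vCenter credentials and a hypothetical VM moref rather than anything taken from this deployment.

```python
# Sketch only: the invoke/poll cycle behind "Waiting for the task ...",
# "_poll_task ... progress is N%" and "completed successfully".
# Host, credentials and the moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=3, task_poll_interval=0.5)

# invoke_api issues the SOAP request (the "Invoking
# VirtualMachine.PowerOnVM_Task with opID=..." lines) and returns a
# Task managed-object reference for the long-running operation.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task polls the task every task_poll_interval seconds and returns
# the completed task info, raising if vCenter reports an error.
task_info = session.wait_for_task(task_ref)
```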
[ 772.889010] env[69982]: DEBUG nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.889398] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 772.889398] env[69982]: value = "task-3864414" [ 772.889398] env[69982]: _type = "Task" [ 772.889398] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.890242] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6bf06d-215a-4d14-a984-59dc40cc066e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.912055] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864414, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.927434] env[69982]: DEBUG nova.network.neutron [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updating instance_info_cache with network_info: [{"id": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "address": "fa:16:3e:5c:6b:a1", "network": {"id": "ef4d362f-fee5-4ed0-a85b-2cdf67ac5652", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-904295664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47175f0d16ed47f1bce853c13287bdc9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd5b64d-ce", "ovs_interfaceid": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.992929] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.020457] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.021518] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.022034] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.022164] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.022385] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.022614] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.023044] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.023144] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.023666] env[69982]: DEBUG nova.virt.hardware [None 
req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.023759] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.024532] env[69982]: DEBUG nova.virt.hardware [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.025662] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a0b3d9e-6d8f-4809-8eec-a6bfa1449fbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.040019] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updated VIF entry in instance network info cache for port f53bc105-863e-4018-a52b-e0115c017916. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 773.040963] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [{"id": "f53bc105-863e-4018-a52b-e0115c017916", "address": "fa:16:3e:6c:bd:6c", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf53bc105-86", "ovs_interfaceid": "f53bc105-863e-4018-a52b-e0115c017916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.043607] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf6bd69-7325-4dd3-8a6c-53e2a72aab08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.233807] env[69982]: DEBUG nova.compute.manager [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 
tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 773.234249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.234954] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a80499d6-ec56-47ac-8c22-d2dfc9a4e56a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.243800] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.246746] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d83b7485-3e45-45d3-94ca-5f6dca944f11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.258579] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 773.258579] env[69982]: value = "task-3864415" [ 773.258579] env[69982]: _type = "Task" [ 773.258579] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.269346] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864415, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.282276] env[69982]: DEBUG oslo_vmware.api [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Task: {'id': task-3864413, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.10028} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.282597] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 773.283157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 773.283157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 773.283157] env[69982]: INFO nova.compute.manager [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Took 1.60 seconds to destroy the instance on the hypervisor. [ 773.283436] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.283567] env[69982]: DEBUG nova.compute.manager [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 773.283664] env[69982]: DEBUG nova.network.neutron [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 773.306493] env[69982]: DEBUG nova.network.neutron [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 773.403771] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864414, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.423288] env[69982]: INFO nova.compute.manager [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Took 44.53 seconds to build instance. 
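The entries above all follow the same oslo.vmware pattern: a vSphere call returns a task object (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task), and the caller blocks in wait_for_task while _poll_task logs progress until the task reaches a terminal state. The sketch below illustrates that polling loop only; it is not the oslo.vmware implementation, and fetch_task_state is a hypothetical stand-in for the PropertyCollector round trip that actually reads task state from vCenter.

import time

def wait_for_task(task_ref, fetch_task_state, poll_interval=0.5, timeout=300):
    # Poll a vCenter task until it reaches a terminal state.
    # fetch_task_state(task_ref) is assumed to return (state, progress, error).
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_task_state(task_ref)
        if state == "success":
            # Corresponds to the "... completed successfully." entries above.
            return
        if state == "error":
            raise RuntimeError(f"Task {task_ref} failed: {error}")
        # Corresponds to the "... progress is N%." entries logged while waiting.
        print(f"Task: {task_ref} progress is {progress}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")

The timestamps above show the same cadence: task-3864415 (PowerOffVM_Task) goes from 0% to completed in about 0.27s, while task-3864414 (ReconfigVM_Task) is reported at 99% twice before finishing with duration_secs 1.414155.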
[ 773.431085] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Releasing lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.431438] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Instance network_info: |[{"id": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "address": "fa:16:3e:5c:6b:a1", "network": {"id": "ef4d362f-fee5-4ed0-a85b-2cdf67ac5652", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-904295664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47175f0d16ed47f1bce853c13287bdc9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd5b64d-ce", "ovs_interfaceid": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 773.431901] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:6b:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'add5b64d-ceb9-4750-8e48-49d509a81e5f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.444540] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Creating folder: Project (47175f0d16ed47f1bce853c13287bdc9). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.448586] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c84fb570-6ee8-43e6-a369-5a135996ba10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.464033] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Created folder: Project (47175f0d16ed47f1bce853c13287bdc9) in parent group-v767796. 
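The pair of entries above, the instance network_info dump followed by "Instance VIF info ...", shows how the driver reduces a Neutron VIF to the handful of fields the VMware layer needs: the integration bridge, the MAC address, the NSX logical switch behind the OVS port, the Neutron port UUID, and the vmxnet3 model. A minimal sketch of that mapping follows; the field names are copied from the log output, but the helper itself is illustrative and not the actual nova.virt.vmwareapi code.

def neutron_vif_to_vmware_vif_info(vif, vif_model="vmxnet3"):
    # vif is one element of the network_info list shown above.
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],      # "br-int"
        "mac_address": vif["address"],                 # "fa:16:3e:5c:6b:a1"
        "network_ref": {
            "type": "OpaqueNetwork",
            # NSX logical switch backing the OVS port, as reported by Neutron.
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                         # Neutron port UUID
        "vif_model": vif_model,
    }

Feeding the add5b64d-ceb9-4750-8e48-49d509a81e5f VIF from the cache entry above through this helper reproduces the dict in the "Instance VIF info" line.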
[ 773.464033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Creating folder: Instances. Parent ref: group-v767907. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 773.466698] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37fd6420-c55a-4dac-bae6-e58bbb702cdf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.481241] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Created folder: Instances in parent group-v767907. [ 773.481495] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.481714] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.481947] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71a894dc-9600-48da-95b1-8d356d4b3355 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.506227] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.506227] env[69982]: value = "task-3864418" [ 773.506227] env[69982]: _type = "Task" [ 773.506227] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.516536] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864418, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.550666] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Releasing lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.550979] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Received event network-vif-plugged-add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.551221] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Acquiring lock "7af5a14d-f586-4746-9831-8be255581637-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.551440] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Lock "7af5a14d-f586-4746-9831-8be255581637-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.551608] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Lock "7af5a14d-f586-4746-9831-8be255581637-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.551778] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] No waiting events found dispatching network-vif-plugged-add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 773.551951] env[69982]: WARNING nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Received unexpected event network-vif-plugged-add5b64d-ceb9-4750-8e48-49d509a81e5f for instance with vm_state building and task_state spawning. [ 773.552142] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received event network-changed-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.552333] env[69982]: DEBUG nova.compute.manager [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing instance network info cache due to event network-changed-f53bc105-863e-4018-a52b-e0115c017916. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 773.552555] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Acquiring lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.552699] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Acquired lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.552859] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Refreshing network info cache for port f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 773.601119] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb496c4a-f52d-4c67-97d4-55ed40e6fe6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.610144] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb233fab-1164-4cee-9edb-35dab1cd3686 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.641887] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702d7866-68c1-43c0-8be4-d3b4f71155d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.651959] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462328aa-55ef-42d7-9464-98778c584e97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.671233] env[69982]: DEBUG nova.compute.provider_tree [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.769426] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864415, 'name': PowerOffVM_Task, 'duration_secs': 0.27189} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.775490] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 773.775705] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 773.776012] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43c3bba5-c095-4168-8645-f31d65a7e207 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.808723] env[69982]: DEBUG nova.network.neutron [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.854579] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 773.854815] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 773.855009] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleting the datastore file [datastore1] 24641406-5292-4497-b34f-9af0dcdc58d7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 773.855295] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-065e530f-219c-4025-939d-2db3bbd2eca2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.864825] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 773.864825] env[69982]: value = "task-3864420" [ 773.864825] env[69982]: _type = "Task" [ 773.864825] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.880642] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864420, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.905984] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864414, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.931088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a7694c1-e4c4-4f1d-9b9d-f814387bc73e tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.086s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.017577] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864418, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.173394] env[69982]: DEBUG nova.scheduler.client.report [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.311610] env[69982]: INFO nova.compute.manager [-] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Took 1.03 seconds to deallocate network for instance. [ 774.378907] env[69982]: DEBUG oslo_vmware.api [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864420, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429847} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.378907] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 774.378907] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 774.379869] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 774.379869] env[69982]: INFO nova.compute.manager [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 774.379869] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 774.380148] env[69982]: DEBUG nova.compute.manager [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 774.380148] env[69982]: DEBUG nova.network.neutron [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 774.404875] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864414, 'name': ReconfigVM_Task, 'duration_secs': 1.414155} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.408665] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Reconfigured VM instance instance-00000024 to attach disk [datastore2] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.413195] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48b4b8a7-46dc-4ae6-9f8d-2c2bb9483013 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.419655] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 774.419655] env[69982]: value = "task-3864421" [ 774.419655] env[69982]: _type = "Task" [ 774.419655] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.431521] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864421, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.434776] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 774.462092] env[69982]: DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Successfully updated port: c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.529025] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864418, 'name': CreateVM_Task, 'duration_secs': 0.575777} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.529025] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 774.529025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.529358] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.529492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 774.529904] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9459634c-8dc0-4129-a638-89a7c957c564 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.538035] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 774.538035] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52658ff9-28d3-3acd-383d-c797befd768a" [ 774.538035] env[69982]: _type = "Task" [ 774.538035] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.553357] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52658ff9-28d3-3acd-383d-c797befd768a, 'name': SearchDatastore_Task, 'duration_secs': 0.013103} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.556277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.556520] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.556791] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.556940] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.557139] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.557576] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d853ee8f-a866-4580-b2f3-5b296ea8dae4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.569673] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.569999] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.570980] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3703004-196e-4327-8f0e-2cc1a8119018 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.582021] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 774.582021] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5246c494-1da7-65f0-0968-e58387f1de70" [ 774.582021] env[69982]: _type = "Task" [ 774.582021] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.596905] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5246c494-1da7-65f0-0968-e58387f1de70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.631121] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updated VIF entry in instance network info cache for port f53bc105-863e-4018-a52b-e0115c017916. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 774.634173] env[69982]: DEBUG nova.network.neutron [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [{"id": "f53bc105-863e-4018-a52b-e0115c017916", "address": "fa:16:3e:6c:bd:6c", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf53bc105-86", "ovs_interfaceid": "f53bc105-863e-4018-a52b-e0115c017916", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.679715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: 
held 2.691s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.680358] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.684431] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.319s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.686582] env[69982]: INFO nova.compute.claims [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 774.820271] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.930675] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864421, 'name': Rename_Task, 'duration_secs': 0.284495} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.931076] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 774.931266] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de605413-f16a-4646-80bc-9ba440f5e89f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.940548] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 774.940548] env[69982]: value = "task-3864422" [ 774.940548] env[69982]: _type = "Task" [ 774.940548] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.954876] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864422, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.968603] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.968603] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.968603] env[69982]: DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 774.975091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.091299] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5246c494-1da7-65f0-0968-e58387f1de70, 'name': SearchDatastore_Task, 'duration_secs': 0.013188} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.092246] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44d3545c-e05f-4847-a09e-6580e7418c9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.099270] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 775.099270] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528947a2-18f4-beae-3f31-e4121e2f1426" [ 775.099270] env[69982]: _type = "Task" [ 775.099270] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.111741] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528947a2-18f4-beae-3f31-e4121e2f1426, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.136867] env[69982]: DEBUG oslo_concurrency.lockutils [req-1ee2076d-a71c-4444-a42d-2af238c46a5f req-d655bb10-53b7-4c50-827f-ceaa9d5f9446 service nova] Releasing lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.191402] env[69982]: DEBUG nova.compute.manager [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Received event network-changed-add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.191661] env[69982]: DEBUG nova.compute.manager [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Refreshing instance network info cache due to event network-changed-add5b64d-ceb9-4750-8e48-49d509a81e5f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 775.191893] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Acquiring lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.192051] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Acquired lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.192262] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Refreshing network info cache for port add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 775.194878] env[69982]: DEBUG nova.compute.utils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 775.200986] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 775.200986] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 775.367110] env[69982]: DEBUG nova.policy [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a4ecf3bf94764bbea25d59a4fea2ebda', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60aa47f826ce4ba7b14d6937eef58338', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.457523] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864422, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.558173] env[69982]: DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 775.612082] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528947a2-18f4-beae-3f31-e4121e2f1426, 'name': SearchDatastore_Task, 'duration_secs': 0.013849} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.612675] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.613207] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 7af5a14d-f586-4746-9831-8be255581637/7af5a14d-f586-4746-9831-8be255581637.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.614265] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77d055ac-8d51-45b5-acf4-38822597251c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.627118] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 775.627118] env[69982]: value = "task-3864423" [ 775.627118] env[69982]: _type = "Task" [ 775.627118] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.644306] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864423, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.701747] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.939877] env[69982]: DEBUG nova.network.neutron [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.956787] env[69982]: DEBUG oslo_vmware.api [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864422, 'name': PowerOnVM_Task, 'duration_secs': 0.808194} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.957035] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 775.958826] env[69982]: INFO nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Took 8.66 seconds to spawn the instance on the hypervisor. [ 775.958826] env[69982]: DEBUG nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 775.959870] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a772c7-067a-4a55-858c-41c004364c3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.038714] env[69982]: DEBUG nova.network.neutron [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.120324] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updated VIF entry in instance network info cache for port add5b64d-ceb9-4750-8e48-49d509a81e5f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 776.120701] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updating instance_info_cache with network_info: [{"id": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "address": "fa:16:3e:5c:6b:a1", "network": {"id": "ef4d362f-fee5-4ed0-a85b-2cdf67ac5652", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-904295664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47175f0d16ed47f1bce853c13287bdc9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd5b64d-ce", "ovs_interfaceid": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.144178] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864423, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.160384] env[69982]: DEBUG nova.compute.manager [req-d227d3f3-1476-4f2d-9ca4-338740754961 req-7dc6f031-af05-45ea-9b1b-35ff49c99e28 service nova] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Received event network-vif-deleted-f53bc105-863e-4018-a52b-e0115c017916 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.348172] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Successfully created port: e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 776.442760] env[69982]: INFO nova.compute.manager [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 2.06 seconds to deallocate network for instance. [ 776.493192] env[69982]: INFO nova.compute.manager [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Took 47.05 seconds to build instance. 
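
The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOnVM_Task entries in this trace all follow the same oslo.vmware session pattern: a vCenter SOAP method is started with invoke_api (the "Invoking ..." lines), the returned task is polled by wait_for_task/_poll_task (the "progress is N%" and "completed successfully" lines), and work on the shared image-cache VMDK is serialized with an oslo.concurrency lock (the "Acquiring"/"Acquired"/"Releasing lock" lines). The following is a minimal sketch of that pattern, not the Nova source: the vCenter host, credentials, datastore paths and the datacenter lookup are placeholders, and the VMwareAPISession positional arguments should be checked against the installed oslo.vmware release.

from oslo_concurrency import lockutils
from oslo_vmware import api, vim_util

# Session roughly equivalent to the one the driver builds at startup
# (host, username, password, api_retry_count, task_poll_interval).
session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

# Placeholder datastore paths standing in for the image-cache source and the
# per-instance destination seen in the log above.
SRC = '[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'
DST = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

# Placeholder datacenter lookup: take the first Datacenter returned by the
# property collector; real code resolves the datacenter owning the datastore.
dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'Datacenter', 1).objects[0].obj

# lockutils.lock() emits the "Acquiring"/"Acquired"/"Releasing lock" DEBUG lines.
with lockutils.lock(SRC):
    disk_mgr = session.vim.service_content.virtualDiskManager
    # invoke_api() issues the SOAP call ("Invoking
    # VirtualDiskManager.CopyVirtualDisk_Task ...") and returns a task moref.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=SRC, sourceDatacenter=dc_ref,
                              destName=DST, destDatacenter=dc_ref)
    # wait_for_task() polls the task ("Task: {...} progress is N%") and returns
    # once vCenter reports success, raising if the task ends in an error state.
    session.wait_for_task(task)
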
[ 776.534605] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3e182c-6001-4738-b455-a2da993f5417 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.544239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.545047] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Instance network_info: |[{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 776.545590] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:d2:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c816f035-7c9d-47ba-8b3b-29a57ec10561', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 776.555275] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating folder: Project (efe64e69253d49a6a1146f240506ce39). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.556378] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1453ecf3-b180-48f5-9650-077a491199da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.564678] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac2f817b-0311-4485-955b-357dcf3fd6aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.604413] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0184b850-3548-4600-a64f-e6d8b524e626 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.607073] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created folder: Project (efe64e69253d49a6a1146f240506ce39) in parent group-v767796. [ 776.607276] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating folder: Instances. Parent ref: group-v767910. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 776.607523] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fdf7865-be92-42fb-a6de-413b4880cb46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.615346] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334c1dc6-2b17-4783-ac2d-68c028ab13dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.626914] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created folder: Instances in parent group-v767910. [ 776.626914] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.626914] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 776.626914] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-86277226-a0a5-4e2d-a54b-a68d9e7626a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.650342] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Releasing lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.650590] env[69982]: DEBUG nova.compute.manager [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.650762] env[69982]: DEBUG nova.compute.manager [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing instance network info cache due to event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 776.651129] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.651269] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.651555] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 776.652836] env[69982]: DEBUG nova.compute.provider_tree [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 776.664062] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864423, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597095} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.671478] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 7af5a14d-f586-4746-9831-8be255581637/7af5a14d-f586-4746-9831-8be255581637.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.671478] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.671478] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 776.671478] env[69982]: value = "task-3864426" [ 776.671478] env[69982]: _type = "Task" [ 776.671478] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.671478] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3bbbe47d-3f8a-4a8b-955c-be778606d50c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.688124] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864426, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.690675] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 776.690675] env[69982]: value = "task-3864427" [ 776.690675] env[69982]: _type = "Task" [ 776.690675] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.704649] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864427, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.716669] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.760313] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:06:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d19589d5-9552-4797-87a2-fa71245a23ed',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-913055492',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.761519] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.761519] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.761519] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.761519] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.761519] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.761894] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 776.761894] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.761894] env[69982]: DEBUG 
nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.761894] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.762988] env[69982]: DEBUG nova.virt.hardware [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.762988] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f2d519d-fe49-4991-b8b0-29c8610df656 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.773916] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7c5687-3fca-4c35-83a9-1415f0db2baf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.959044] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.997220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f2b8b9ef-3c85-4707-be7c-4f69e401fee4 tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.624s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.070384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.070525] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.161376] env[69982]: DEBUG nova.scheduler.client.report [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.189020] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864426, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.202841] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864427, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117791} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.202841] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 777.203702] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08434b4d-8c53-40e0-bedd-9312f44bc707 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.236829] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 7af5a14d-f586-4746-9831-8be255581637/7af5a14d-f586-4746-9831-8be255581637.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.237774] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b06c4d7-7bf4-4b9d-b444-3b6e38669b61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.267664] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 777.267664] env[69982]: value = "task-3864428" [ 777.267664] env[69982]: _type = "Task" [ 777.267664] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.279700] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864428, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.501205] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 777.513362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "48162423-a117-437e-b171-9a40c7c6f49b" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.513630] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.513837] env[69982]: INFO nova.compute.manager [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Rebooting instance [ 777.534514] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updated VIF entry in instance network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 777.535721] env[69982]: DEBUG nova.network.neutron [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.637800] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "c34355fa-3712-4338-942d-acdb2f8a91ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.638072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.667544] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.983s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.668563] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 777.671162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.242s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.675949] env[69982]: INFO nova.compute.claims [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.686328] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864426, 'name': CreateVM_Task, 'duration_secs': 0.680234} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.686523] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 777.687223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.687382] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.687753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 777.688044] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8837fba5-714c-4cdc-b7bb-8c9543df0b4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.694126] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 777.694126] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5268554b-0277-6d5d-29cc-ed87a69e106c" [ 777.694126] env[69982]: _type = "Task" [ 777.694126] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.704682] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5268554b-0277-6d5d-29cc-ed87a69e106c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.781618] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864428, 'name': ReconfigVM_Task, 'duration_secs': 0.300664} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.782402] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 7af5a14d-f586-4746-9831-8be255581637/7af5a14d-f586-4746-9831-8be255581637.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.783634] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abc5ce1f-6c66-4080-a11e-88e64d415b95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.794022] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 777.794022] env[69982]: value = "task-3864429" [ 777.794022] env[69982]: _type = "Task" [ 777.794022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.809867] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864429, 'name': Rename_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.967549] env[69982]: DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Received event network-vif-plugged-c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.967773] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 777.968015] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 777.968426] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.968682] env[69982]: DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] No waiting events found dispatching network-vif-plugged-c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 777.969057] env[69982]: WARNING nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Received unexpected event network-vif-plugged-c816f035-7c9d-47ba-8b3b-29a57ec10561 for instance with vm_state building and task_state spawning. [ 777.969311] env[69982]: DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Received event network-changed-c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.969516] env[69982]: DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Refreshing instance network info cache due to event network-changed-c816f035-7c9d-47ba-8b3b-29a57ec10561. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 777.969811] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.970057] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.970310] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Refreshing network info cache for port c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 778.040441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.040441] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e1f2c20-d2a5-4968-b9a6-18b1b6bbb1f6 req-737acb88-b88d-4d3a-be40-90c7ece9c056 service nova] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.053479] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.053673] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquired lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.053851] env[69982]: DEBUG nova.network.neutron [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 778.184633] env[69982]: DEBUG nova.compute.utils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 778.185359] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 778.185526] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 778.207889] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5268554b-0277-6d5d-29cc-ed87a69e106c, 'name': SearchDatastore_Task, 'duration_secs': 0.011529} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.207889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.208192] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 778.208374] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.208528] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.211836] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 778.211836] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c90bf80e-2c56-4a28-a8bd-b438b718891c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.222035] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 778.222284] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 778.223372] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9656820c-1faf-4dab-a6cb-4aaabad1f238 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.232219] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 778.232219] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528ad8e7-b2ca-0e5e-af4d-24577a3ece17" [ 778.232219] env[69982]: _type = "Task" [ 778.232219] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.245504] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528ad8e7-b2ca-0e5e-af4d-24577a3ece17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.252104] env[69982]: DEBUG nova.policy [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 778.306487] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864429, 'name': Rename_Task, 'duration_secs': 0.162137} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.307062] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.307172] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7becc7bb-1b25-4cbb-b997-7790a294c5fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.317748] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 778.317748] env[69982]: value = "task-3864430" [ 778.317748] env[69982]: _type = "Task" [ 778.317748] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.330049] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864430, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.441209] env[69982]: DEBUG nova.compute.manager [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.441209] env[69982]: DEBUG nova.compute.manager [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing instance network info cache due to event network-changed-44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 778.441531] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] Acquiring lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.441868] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] Acquired lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.442184] env[69982]: DEBUG nova.network.neutron [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Refreshing network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 778.698160] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 778.746387] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528ad8e7-b2ca-0e5e-af4d-24577a3ece17, 'name': SearchDatastore_Task, 'duration_secs': 0.014494} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.751916] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccb5254f-4182-4fa6-add6-0f96de5d317a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.755184] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.759973] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 778.759973] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fbb8f-7147-d926-4ebe-4441f13d5365" [ 778.759973] env[69982]: _type = "Task" [ 778.759973] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.764808] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Successfully created port: 8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 778.780152] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fbb8f-7147-d926-4ebe-4441f13d5365, 'name': SearchDatastore_Task, 'duration_secs': 0.017904} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.780443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.780704] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 778.781059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13eeed66-0a65-4aa6-b6e3-bdaff9c3737d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.792130] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 778.792130] env[69982]: value = "task-3864431" [ 778.792130] env[69982]: _type = "Task" [ 778.792130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.802773] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864431, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.839637] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864430, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.861997] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Successfully updated port: e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 779.034922] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updated VIF entry in instance network info cache for port c816f035-7c9d-47ba-8b3b-29a57ec10561. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 779.035361] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.037418] env[69982]: DEBUG nova.network.neutron [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", 
"segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.309196] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864431, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.333355] env[69982]: DEBUG oslo_vmware.api [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864430, 'name': PowerOnVM_Task, 'duration_secs': 0.576182} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.333732] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.333972] env[69982]: INFO nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Took 9.28 seconds to spawn the instance on the hypervisor. 
[ 779.334298] env[69982]: DEBUG nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.335774] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05076e40-2c4e-42ed-b8a7-a3b1b6da7c4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.368568] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.369067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.369067] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 779.419564] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e00168-5a9e-47ff-a91c-46c5e581222c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.429672] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc5b999-c21d-400e-a36c-7a9781d9ac6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.465107] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd3e8fa-7e45-4b6d-9c6d-cb766d01ac33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.475785] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc0450d-020c-4ba4-aa0d-b25933bacb67 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.492989] env[69982]: DEBUG nova.compute.provider_tree [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.542939] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.543177] env[69982]: 
DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.543355] env[69982]: DEBUG nova.compute.manager [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing instance network info cache due to event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.543587] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Acquiring lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.544068] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Releasing lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.545221] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Acquired lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.545411] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 779.718801] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 779.745541] env[69982]: DEBUG nova.network.neutron [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updated VIF entry in instance network info cache for port 44b0fa67-6d20-4612-a177-c8b0ed39faaf. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 779.745872] env[69982]: DEBUG nova.network.neutron [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [{"id": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "address": "fa:16:3e:75:a9:c0", "network": {"id": "2185dd72-077a-43a8-959e-b94144cd6d34", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1495184724-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "66125bb452294cea84c97f820c3e94ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7150f662-0cf1-44f9-ae14-d70f479649b6", "external-id": "nsx-vlan-transportzone-712", "segmentation_id": 712, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44b0fa67-6d", "ovs_interfaceid": "44b0fa67-6d20-4612-a177-c8b0ed39faaf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.761102] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 779.762172] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.762172] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 779.762172] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
779.762172] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 779.762304] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 779.762512] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 779.762729] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 779.762937] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 779.763145] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 779.763337] env[69982]: DEBUG nova.virt.hardware [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 779.764656] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbe1bef-930b-46d4-ad1c-bc694da4ab3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.768487] env[69982]: INFO nova.compute.manager [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Rebuilding instance [ 779.776564] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b133bc9d-26ab-4c1a-9642-75a0ab1df737 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.802738] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.864321} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.806905] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 779.807213] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 779.807703] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f3958561-1331-442b-9513-30ab60b311a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.817126] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 779.817126] env[69982]: value = "task-3864432" [ 779.817126] env[69982]: _type = "Task" [ 779.817126] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.828483] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864432, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.834887] env[69982]: DEBUG nova.compute.manager [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.837498] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7660e7-4bb0-4159-ae25-6b73177d7bc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.863921] env[69982]: INFO nova.compute.manager [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Took 46.14 seconds to build instance. [ 779.940144] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 779.996070] env[69982]: DEBUG nova.scheduler.client.report [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.051231] env[69982]: DEBUG nova.compute.manager [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.052126] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb92498-e038-4391-bbe4-a9a322588706 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.229926] env[69982]: DEBUG nova.network.neutron [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.248658] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fbe5088-a507-4343-9203-865ba7fa07e8 req-8b91522f-e1c5-4e55-ae85-617a05bcee64 service nova] Releasing lock "refresh_cache-589419ea-c609-45bb-bde5-3b22d9ff111e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.333799] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864432, 'name': 
ExtendVirtualDisk_Task, 'duration_secs': 0.199271} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.336298] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 780.337237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99724180-7637-4ff1-986b-1700a6ccbc63 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.372458] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.373321] env[69982]: DEBUG oslo_concurrency.lockutils [None req-96d52cbc-d7c5-4df2-9349-97eb8200c851 tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.774s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.373841] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8929ffcb-9a23-46de-9c40-49a43580f363 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.397653] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 780.397653] env[69982]: value = "task-3864433" [ 780.397653] env[69982]: _type = "Task" [ 780.397653] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.407659] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864433, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.508038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.834s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.508038] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 780.509645] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.350s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.509645] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.511581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.323s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.511767] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.515183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.413s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.515183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.515362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 24.326s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.515533] env[69982]: DEBUG nova.objects.instance [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 780.522036] env[69982]: DEBUG nova.compute.manager [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Received event network-vif-plugged-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.522036] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Acquiring lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.522036] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.522036] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.522036] env[69982]: DEBUG nova.compute.manager [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] No waiting events found dispatching network-vif-plugged-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 780.522513] env[69982]: WARNING nova.compute.manager [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Received unexpected event network-vif-plugged-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a for instance with vm_state building and task_state spawning. 
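[editor's note] The "Inventory has not changed for provider 206a5498-..." record above lists, per resource class, a total, a reserved amount and an allocation ratio. Placement's usable capacity is commonly described as (total - reserved) * allocation_ratio; the short sketch below, with the values copied from that record (the formula itself is the only assumption), shows what that works out to for this compute node.

# Effective capacity implied by the provider inventory logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Assumed Placement rule: allocations are capped at
    # (total - reserved) * allocation_ratio.
    cap = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc:10s} capacity = ({inv['total']} - {inv['reserved']}) "
          f"* {inv['allocation_ratio']} = {cap:g}")

# VCPU       capacity = (48 - 0) * 4.0 = 192
# MEMORY_MB  capacity = (196590 - 512) * 1.0 = 196078
# DISK_GB    capacity = (400 - 0) * 1.0 = 400

The long "compute_resources" lock waits in the same records (around 32 seconds for ResourceTracker.update_usage) are the serialization cost of keeping those usage numbers consistent while many tempest instances build and delete concurrently.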
[ 780.522513] env[69982]: DEBUG nova.compute.manager [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Received event network-changed-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.522513] env[69982]: DEBUG nova.compute.manager [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Refreshing instance network info cache due to event network-changed-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 780.522513] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.557501] env[69982]: INFO nova.scheduler.client.report [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Deleted allocations for instance 5ba60eb7-ee6f-47e2-b6ca-b54817dab371 [ 780.558385] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updated VIF entry in instance network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 780.558728] env[69982]: DEBUG nova.network.neutron [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.567765] env[69982]: INFO nova.scheduler.client.report [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocations for instance 
f9124657-d1c5-4a93-9d4a-3b06ca60ec63 [ 780.589158] env[69982]: INFO nova.scheduler.client.report [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Deleted allocations for instance bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7 [ 780.685464] env[69982]: DEBUG nova.compute.manager [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Received event network-vif-plugged-8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.685704] env[69982]: DEBUG oslo_concurrency.lockutils [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] Acquiring lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.685938] env[69982]: DEBUG oslo_concurrency.lockutils [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 780.686295] env[69982]: DEBUG oslo_concurrency.lockutils [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.686383] env[69982]: DEBUG nova.compute.manager [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] No waiting events found dispatching network-vif-plugged-8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 780.686526] env[69982]: WARNING nova.compute.manager [req-96ed368e-4aa9-4f46-9212-f10b0c3ec2c2 req-dbbe938f-909d-43dc-97ed-e2f3a2be0b7d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Received unexpected event network-vif-plugged-8614e294-a2cc-4a82-ab33-bbc5def50f56 for instance with vm_state building and task_state spawning. 
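[editor's note] The event records above show the external-event handshake: the compute manager expects a network-vif-plugged-<port_id> event, Neutron delivers it through external_instance_event, pop_instance_event looks for a registered waiter, and when none exists yet (the instance is still building) the event is logged as unexpected, as in the WARNING lines above. The following is an illustrative simplification of that shape only, not Nova's implementation; all names in it are invented for the sketch.

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}        # "network-vif-plugged-<port_id>" -> Event

    def prepare(self, name):
        """Register interest before starting the work that triggers the event."""
        with self._lock:
            return self._waiters.setdefault(name, threading.Event())

    def pop(self, name):
        """Called when the event arrives; returns False if nobody was waiting."""
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            print(f"WARNING: received unexpected event {name}")
            return False
        waiter.set()
        return True

events = InstanceEvents()
port_event = "network-vif-plugged-8614e294-a2cc-4a82-ab33-bbc5def50f56"
waiter = events.prepare(port_event)       # before plugging the VIF
events.pop(port_event)                    # event handler thread, on delivery
waiter.wait(timeout=300)                  # spawn continues once the VIF is up

If pop() runs before prepare(), the real code has no waiter to hand the event to, which is exactly the "Received unexpected event ... for instance with vm_state building and task_state spawning" warning seen here; it is benign because the port status is re-read from Neutron when the network info cache is refreshed.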
[ 780.736024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.736024] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Instance network_info: |[{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 780.736529] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.736529] env[69982]: DEBUG nova.network.neutron [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Refreshing network info cache for port e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 780.738851] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f8:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e0bdee0a-ac19-47d1-9d6b-baffaa7a181a', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 780.751364] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 780.752587] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 780.753115] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5ac3e16-bbff-41d6-ad79-3cab46fe8100 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.777497] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 780.777497] env[69982]: value = "task-3864434" [ 780.777497] env[69982]: _type = "Task" [ 780.777497] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.786493] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864434, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.875458] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 780.875805] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-072de54d-a2ee-43e3-84ba-356abb4fd2c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.883332] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 780.883332] env[69982]: value = "task-3864435" [ 780.883332] env[69982]: _type = "Task" [ 780.883332] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.893867] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 780.898083] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.910403] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864433, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.021126] env[69982]: DEBUG nova.compute.utils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.022614] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.026098] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.073024] env[69982]: DEBUG oslo_concurrency.lockutils [req-4dea7820-25a6-4eab-93db-d04f42a9d3de req-8f4bcb19-453c-44b0-9aaf-80291523080f service nova] Releasing lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.074180] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96850a0-9ca1-43a7-a2c1-8ae155207961 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.078216] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2f01a954-669e-4d52-bb1f-fc2a4c5e06f1 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271 tempest-FloatingIPsAssociationNegativeTestJSON-1104366271-project-member] Lock "5ba60eb7-ee6f-47e2-b6ca-b54817dab371" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.494s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.083226] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b0bdb95e-bfbd-46e9-bd19-77c09fa51ea3 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "f9124657-d1c5-4a93-9d4a-3b06ca60ec63" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.608s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.088300] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Doing hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 781.089040] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-d8578ed9-5cea-49a3-9673-d841148888db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.100077] env[69982]: DEBUG oslo_vmware.api [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: 
(returnval){ [ 781.100077] env[69982]: value = "task-3864436" [ 781.100077] env[69982]: _type = "Task" [ 781.100077] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.100729] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fad57b-f36a-4eb3-b324-f57a189861b2 tempest-ServerDiagnosticsNegativeTest-2118027082 tempest-ServerDiagnosticsNegativeTest-2118027082-project-member] Lock "bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.468s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.115534] env[69982]: DEBUG oslo_vmware.api [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864436, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.207353] env[69982]: DEBUG nova.policy [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99858c5fbda7454cab0188cf368e51f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83b53a0998874810b5302415624592cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.285838] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Successfully updated port: 8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 781.293797] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864434, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.392725] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864435, 'name': PowerOffVM_Task, 'duration_secs': 0.402091} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.392995] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 781.394283] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 781.395038] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65fac5e7-d04a-4420-baf5-0b3d3b119c4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.411998] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 781.412369] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99f8cf84-f06a-4c13-b2b6-439f9c4621b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.421936] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864433, 'name': ReconfigVM_Task, 'duration_secs': 0.783318} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.422838] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.423024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-852d54fe-7f27-40a3-8f75-31594eb22894 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.428849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.438299] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 781.438299] env[69982]: value = "task-3864438" [ 781.438299] env[69982]: _type = "Task" [ 781.438299] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.453342] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864438, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.460034] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 781.460034] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 781.460034] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Deleting the datastore file [datastore2] d73153ad-9258-4c3c-9699-b6364408d631 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 781.460034] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af54be50-9890-4275-91c1-089f7b128c2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.469148] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 781.469148] env[69982]: value = "task-3864439" [ 781.469148] env[69982]: _type = "Task" [ 781.469148] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.481424] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864439, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.541481] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Start building block device mappings for instance. 
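
The "Waiting for the task" / "progress is N%" / "completed successfully" triplets that recur above (tasks 3864433, 3864438, 3864439) come from oslo.vmware's task-polling loop, which keeps re-reading the vCenter task state until it reaches a terminal state. A minimal Python sketch of that pattern, assuming a caller-supplied get_task_info() helper in place of the real PropertyCollector lookup:

import time

# Minimal sketch of the poll loop behind the "Waiting for the task",
# "progress is N%" and "completed successfully" messages in this log.
# get_task_info() is a hypothetical stand-in for the vCenter lookup.
def wait_for_task(get_task_info, task_id, interval=0.1):
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            print(f"Task: {{'id': {task_id!r}, 'name': {info['name']!r}}} completed successfully.")
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {{'id': {task_id!r}, 'name': {info['name']!r}}} progress is {info['progress']}%.")
        time.sleep(interval)

# Fake task that reports 0%, 50%, then success, purely to exercise the loop.
_states = iter([
    {"state": "running", "name": "CreateVM_Task", "progress": 0},
    {"state": "running", "name": "CreateVM_Task", "progress": 50},
    {"state": "success", "name": "CreateVM_Task", "result": "vm-123"},
])
print(wait_for_task(lambda task_id: next(_states), "task-0001"))
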
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 781.545510] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cec982ec-2004-4db7-bd13-14383ac18346 tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.030s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.547995] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.341s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.551443] env[69982]: INFO nova.compute.claims [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.615185] env[69982]: DEBUG oslo_vmware.api [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864436, 'name': ResetVM_Task, 'duration_secs': 0.123572} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.616665] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Did hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 781.616665] env[69982]: DEBUG nova.compute.manager [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.616878] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849af054-905f-4b16-bbab-fb4117603b64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.779359] env[69982]: DEBUG nova.network.neutron [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updated VIF entry in instance network info cache for port e0bdee0a-ac19-47d1-9d6b-baffaa7a181a. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 781.780799] env[69982]: DEBUG nova.network.neutron [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.792211] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Successfully created port: 43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 781.794780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.794911] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.795062] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 781.804604] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864434, 'name': CreateVM_Task, 'duration_secs': 0.553745} completed successfully. 
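
The instance_info_cache update above stores the network_info as a JSON-style list of VIF dictionaries; the fields Nova reads back later (fixed IP, MTU, NSX segmentation ID) all live in that blob. A short sketch over a hand-copied subset of the cached entry for port e0bdee0a-ac19-47d1-9d6b-baffaa7a181a:

# Hand-copied subset of the VIF entry logged above, for illustration only.
vif = {
    "id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a",
    "address": "fa:16:3e:e3:f8:55",
    "type": "ovs",
    "devname": "tape0bdee0a-ac",
    "network": {
        "bridge": "br-int",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.151", "type": "fixed"}],
        }],
    },
    "details": {"segmentation_id": 626},
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["id"], fixed_ips, vif["network"]["meta"]["mtu"], vif["details"]["segmentation_id"])
# e0bdee0a-ac19-47d1-9d6b-baffaa7a181a ['192.168.233.151'] 8950 626
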
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.804797] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 781.805577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.805732] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.806081] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 781.806631] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b7cb94f-744f-4ca7-8d85-cac866469c2e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.812950] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 781.812950] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d4367-b38f-1abc-e7c1-8c4eb3d55cf0" [ 781.812950] env[69982]: _type = "Task" [ 781.812950] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.822450] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d4367-b38f-1abc-e7c1-8c4eb3d55cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.951016] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864438, 'name': Rename_Task, 'duration_secs': 0.188028} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.954267] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 781.954438] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c18f1e4f-0098-4ad8-bb2c-7103796a6243 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.966317] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 781.966317] env[69982]: value = "task-3864440" [ 781.966317] env[69982]: _type = "Task" [ 781.966317] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.992452] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864440, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.996917] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864439, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137732} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.997257] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 781.997448] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 781.997626] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 782.133540] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a63040d7-1ee8-4d93-9bf4-f1cd9d749c36 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.620s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.299290] env[69982]: DEBUG oslo_concurrency.lockutils [req-d62ff476-3da7-49f0-a326-ad2d66d6cfa3 req-ceb502eb-e8bf-4292-a2a7-318bdbaf9fc1 service nova] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.326164] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d4367-b38f-1abc-e7c1-8c4eb3d55cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.020536} completed successfully. 
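
The req-8f12989d request has now walked the whole VMware destroy path for instance d73153ad-9258-4c3c-9699-b6364408d631: power off, unregister the VM, delete its directory on datastore2, then report the instance destroyed. A compressed sketch of that ordering, with trivial print helpers standing in for the vm_util/ds_util calls:

# Ordering of the teardown steps seen above. The helpers are stand-ins for
# power_off_instance, VirtualMachine.UnregisterVM and
# FileManager.DeleteDatastoreFile_Task; only the sequence is the point.
def power_off(vm): print(f"Powered off the VM {vm}")
def unregister(vm): print(f"Unregistered the VM {vm}")
def delete_datastore_dir(ds, path): print(f"Deleting the datastore file [{ds}] {path}")

def destroy_instance(vm_ref, datastore, instance_uuid):
    power_off(vm_ref)
    unregister(vm_ref)
    delete_datastore_dir(datastore, instance_uuid)  # contents removed before the
    print("Instance destroyed")                     # instance is reported destroyed

destroy_instance("vm-d73153ad", "datastore2", "d73153ad-9258-4c3c-9699-b6364408d631")
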
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.326482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 782.326711] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 782.326944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.327102] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.327283] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 782.327548] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9c2b121-b121-468c-9a7d-a83c1dadeb3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.343312] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 782.344019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 782.344300] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e4fcab3-0e96-4c28-beaf-ba8a72328d83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.350230] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 782.350230] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5293bce9-7b45-a1c3-0f42-47bed1a463fd" [ 782.350230] env[69982]: _type = "Task" [ 782.350230] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.355505] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 782.363786] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5293bce9-7b45-a1c3-0f42-47bed1a463fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.477902] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864440, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.553226] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Start spawning the instance on the hypervisor. 
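
The MigrationsAdminTest request above shows the VMDK image-cache convention: the Glance image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 is kept under [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk, a SearchDatastore_Task checks whether it is already cached, and the disk is then copied into the instance directory (the CopyVirtualDisk_Task that appears further down) rather than fetched again. A sketch of just the path construction, with made-up helper names:

# Datastore path layout used by the image cache, as seen in this log. The
# existence check and the copy itself are vCenter tasks (SearchDatastore_Task,
# CopyVirtualDisk_Task) and are not reproduced here.
CACHE_FOLDER = "devstack-image-cache_base"

def cached_image_path(datastore, image_id):
    return f"[{datastore}] {CACHE_FOLDER}/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore, instance_uuid):
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

src = cached_image_path("datastore1", "a4e69d6f-1c15-4f57-92a8-5e81c6be8172")
dst = instance_disk_path("datastore1", "8b812422-4ca6-4d2b-b6af-873fdb21fab6")
print(f"Copying Virtual Disk {src} to {dst}")
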
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 782.583699] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 782.583971] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.584126] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 782.584311] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.584452] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 782.584596] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 782.584798] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 782.584954] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 782.585344] 
env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 782.585530] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 782.586196] env[69982]: DEBUG nova.virt.hardware [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 782.587689] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de5c8a3-507f-47aa-879f-e1afb3cd2171 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.605018] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5bfb08-16ba-4a99-b0cb-009fe6368d9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.673714] env[69982]: DEBUG nova.network.neutron [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Updating instance_info_cache with network_info: [{"id": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "address": "fa:16:3e:31:68:35", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8614e294-a2", "ovs_interfaceid": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.863256] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5293bce9-7b45-a1c3-0f42-47bed1a463fd, 'name': SearchDatastore_Task, 'duration_secs': 0.027494} completed successfully. 
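
The nova.virt.hardware negotiation logged above boils down to: the m1.nano flavor asks for 1 vCPU with no explicit topology, the limits default to 65536 sockets/cores/threads each, and the only topology whose product matches is 1 socket x 1 core x 1 thread. A simplified enumeration of that idea (not the actual nova.virt.hardware implementation):

from dataclasses import dataclass
from itertools import product

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

# Enumerate topologies whose product equals the vCPU count and that fit the
# per-dimension limits. For 1 vCPU this yields exactly Topology(1, 1, 1),
# matching the "Possible topologies" line above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    return [Topology(s, c, t)
            for s, c, t in product(range(1, vcpus + 1), repeat=3)
            if s * c * t == vcpus
            and s <= max_sockets and c <= max_cores and t <= max_threads]

print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
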
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.864652] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32bed4ef-c897-4e91-8597-7f0df0ea7c5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.872428] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 782.872428] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52282ad6-095f-7e6e-1247-2a5e136e84cd" [ 782.872428] env[69982]: _type = "Task" [ 782.872428] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.886430] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52282ad6-095f-7e6e-1247-2a5e136e84cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.991375] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864440, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.060720] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.061074] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.061307] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.061736] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 
tempest-ServersListShow296Test-945083984-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.061978] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.062233] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.062542] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.062779] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 783.063064] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.063415] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.063581] env[69982]: DEBUG nova.virt.hardware [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.064678] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d9c22d-fbce-4261-99a4-867533817b02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.074609] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92d03a5-a313-4f16-9370-a06979a08aaa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.094513] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.102486] env[69982]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.103330] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.104107] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14c77fa4-3700-4f69-9049-56b4058bc201 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.122845] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.122845] env[69982]: value = "task-3864441" [ 783.122845] env[69982]: _type = "Task" [ 783.122845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.133271] env[69982]: DEBUG nova.compute.manager [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Received event network-changed-8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.133568] env[69982]: DEBUG nova.compute.manager [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Refreshing instance network info cache due to event network-changed-8614e294-a2cc-4a82-ab33-bbc5def50f56. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.133834] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Acquiring lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.137703] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864441, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.183980] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.183980] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Instance network_info: |[{"id": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "address": "fa:16:3e:31:68:35", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8614e294-a2", "ovs_interfaceid": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 783.184669] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Acquired lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.184669] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Refreshing network info cache for port 8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 783.185593] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:68:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8614e294-a2cc-4a82-ab33-bbc5def50f56', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 783.197390] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating folder: Project 
(7036b34d089a4ca0b779e9ab9b86cc77). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.201947] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-35a88a54-714a-4d27-80da-0d95ccc01d53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.217320] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created folder: Project (7036b34d089a4ca0b779e9ab9b86cc77) in parent group-v767796. [ 783.217320] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating folder: Instances. Parent ref: group-v767915. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 783.217320] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3fc71a50-98ce-4d57-9381-2a442d26f549 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.231239] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created folder: Instances in parent group-v767915. [ 783.231739] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 783.233030] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 783.233030] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32a665ef-151c-4a04-a0ad-9349798fb746 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.262143] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 783.262143] env[69982]: value = "task-3864444" [ 783.262143] env[69982]: _type = "Task" [ 783.262143] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.272996] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b85c1b6-332e-4172-a159-3e86906c3281 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.287940] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ad3153-9839-49ea-b29e-33f6e46bf12e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.293415] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864444, 'name': CreateVM_Task} progress is 6%. 
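
The two Folder.CreateFolder calls above build the per-tenant hierarchy the VM is registered into: a "Project (<project-id>)" folder under the OpenStack parent (group-v767796 here), with an "Instances" child folder. A toy sketch of that structure; create_folder() just records parent/child names rather than returning real vCenter managed object refs:

# Toy model of the folder hierarchy created above. In the real driver each
# call is a Folder.CreateFolder vCenter task returning a managed object ref.
def create_folder(tree, parent, name):
    tree.setdefault(parent, []).append(name)
    return name

tree = {}
project = create_folder(tree, "group-v767796",
                        "Project (7036b34d089a4ca0b779e9ab9b86cc77)")
create_folder(tree, project, "Instances")
print(tree)
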
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.326069] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947ed65e-5140-4f6d-b010-b2a7a1b20ac4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.334963] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa5ec4a-1f3f-4319-a6e0-13ba7e4e3367 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.350855] env[69982]: DEBUG nova.compute.provider_tree [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.387143] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52282ad6-095f-7e6e-1247-2a5e136e84cd, 'name': SearchDatastore_Task, 'duration_secs': 0.015487} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.391030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.391030] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 783.391030] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20d90764-d6fb-4191-8cb6-f8d8a98e5856 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.397616] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 783.397616] env[69982]: value = "task-3864445" [ 783.397616] env[69982]: _type = "Task" [ 783.397616] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.409780] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864445, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.484938] env[69982]: DEBUG oslo_vmware.api [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864440, 'name': PowerOnVM_Task, 'duration_secs': 1.363646} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.485363] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 783.485661] env[69982]: INFO nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Took 10.49 seconds to spawn the instance on the hypervisor. [ 783.485861] env[69982]: DEBUG nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.486946] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2b7870-589a-4e73-8478-c2c980088805 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.621625] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "cd839916-6daf-4b31-941d-6305a585bfaa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.622506] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.638088] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864441, 'name': CreateVM_Task, 'duration_secs': 0.382872} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.638346] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.639360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.639590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.640105] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 783.640450] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec6cf37d-6399-405f-8ab4-e2bb3267bcb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.649402] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 783.649402] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e90c86-d908-50d1-dc4c-f64aaab38a7d" [ 783.649402] env[69982]: _type = "Task" [ 783.649402] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.660741] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e90c86-d908-50d1-dc4c-f64aaab38a7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.773487] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864444, 'name': CreateVM_Task, 'duration_secs': 0.428869} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.773672] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 783.774534] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.858079] env[69982]: DEBUG nova.scheduler.client.report [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.908915] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864445, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.003613] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Successfully updated port: 43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.018075] env[69982]: INFO nova.compute.manager [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Took 43.32 seconds to build instance. [ 784.093121] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Updated VIF entry in instance network info cache for port 8614e294-a2cc-4a82-ab33-bbc5def50f56. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 784.093553] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Updating instance_info_cache with network_info: [{"id": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "address": "fa:16:3e:31:68:35", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8614e294-a2", "ovs_interfaceid": "8614e294-a2cc-4a82-ab33-bbc5def50f56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.164698] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e90c86-d908-50d1-dc4c-f64aaab38a7d, 'name': SearchDatastore_Task, 'duration_secs': 0.012769} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.165343] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.165722] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 784.166687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.167106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.167432] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 784.167858] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.168297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 784.172126] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f485a305-849f-4eaf-92b0-3d957acbde47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.172126] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0574b2d2-54c2-440d-a3fb-7e4b95c9f2a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.176884] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 
tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 784.176884] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6300a-d75f-7cc1-8c01-710d987e7311" [ 784.176884] env[69982]: _type = "Task" [ 784.176884] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.181909] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 784.182327] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 784.183552] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da8d1498-0224-4178-812b-cf3c1200df1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.189451] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6300a-d75f-7cc1-8c01-710d987e7311, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.193683] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 784.193683] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994a2d-6d03-9687-28a9-01ac8462f3ef" [ 784.193683] env[69982]: _type = "Task" [ 784.193683] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.203868] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994a2d-6d03-9687-28a9-01ac8462f3ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.366778] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.817s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.366778] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.372168] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.806s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.372713] env[69982]: DEBUG nova.objects.instance [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'resources' on Instance uuid 3e109fff-94bd-41a9-bc43-373143b7fda5 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 784.410957] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864445, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.673946} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.411423] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 784.411705] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 784.411987] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9dc0674-9236-43d6-a450-d2bf9d873d12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.422114] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 784.422114] env[69982]: value = "task-3864446" [ 784.422114] env[69982]: _type = "Task" [ 784.422114] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.432368] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864446, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.512499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.512499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.512795] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 784.519302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b7fec1-78e7-4274-8533-689685fed308 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.196s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.599739] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Releasing lock "refresh_cache-b5ad55cc-9010-46be-bfd0-28fa1607f1c3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.600062] env[69982]: DEBUG nova.compute.manager [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Received event network-changed-add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 784.600258] env[69982]: DEBUG nova.compute.manager [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Refreshing instance network info cache due to event network-changed-add5b64d-ceb9-4750-8e48-49d509a81e5f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 784.600488] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Acquiring lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.602490] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Acquired lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.602490] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Refreshing network info cache for port add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 784.689890] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d6300a-d75f-7cc1-8c01-710d987e7311, 'name': SearchDatastore_Task, 'duration_secs': 0.037982} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.690218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.690492] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 784.690761] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.707195] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994a2d-6d03-9687-28a9-01ac8462f3ef, 'name': SearchDatastore_Task, 'duration_secs': 0.030528} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.707195] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-456f313c-20a8-4928-bb4e-168a1f5e6128 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.714869] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 784.714869] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52184011-6a0a-1656-ee14-db38aaad8663" [ 784.714869] env[69982]: _type = "Task" [ 784.714869] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.729022] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52184011-6a0a-1656-ee14-db38aaad8663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.877138] env[69982]: DEBUG nova.compute.utils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 784.884376] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.884489] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.943098] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077727} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.943098] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 784.943517] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab430e9-8661-4ecf-bb28-19387ad16685 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.974065] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 784.975357] env[69982]: DEBUG nova.policy [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '114e89c3714d4ce39f4947a0aa567aba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5ba58648e534c55953c89a4eae7caf4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 784.978533] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad475076-967c-482e-a16b-f20f469552ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.003619] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 785.003619] env[69982]: value = "task-3864447" [ 785.003619] env[69982]: _type = "Task" [ 785.003619] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.021345] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864447, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.021701] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 785.091180] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 785.194986] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.195729] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.233664] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52184011-6a0a-1656-ee14-db38aaad8663, 'name': SearchDatastore_Task, 'duration_secs': 0.02159} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.235879] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.236284] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 785.248488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.248736] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 785.249056] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-210a4885-9682-4c35-9d29-0f935c22c37c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.251519] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6442aff-7812-40fb-9604-e7163e5d88ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.261060] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 785.261060] env[69982]: value = "task-3864448" [ 785.261060] env[69982]: _type = "Task" [ 785.261060] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.264398] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 785.264573] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 785.265616] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77b2afac-4b5b-40c2-bbd7-22773f830746 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.271883] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.276452] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 785.276452] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5253b9cb-4887-f6f4-287a-302737eef39d" [ 785.276452] env[69982]: _type = "Task" [ 785.276452] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.288824] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5253b9cb-4887-f6f4-287a-302737eef39d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.379066] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "48162423-a117-437e-b171-9a40c7c6f49b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.379599] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.379897] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "48162423-a117-437e-b171-9a40c7c6f49b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.380126] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.380313] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.383446] env[69982]: INFO nova.compute.manager [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Terminating instance [ 785.385171] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.520337] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864447, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.545895] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.578510] env[69982]: DEBUG nova.network.neutron [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Updating instance_info_cache with network_info: [{"id": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "address": "fa:16:3e:8e:50:9e", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43f402d8-57", "ovs_interfaceid": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.592748] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-371edd15-e7b0-494c-929e-fc7eb1ebf95e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.601709] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5bf2ec-363e-4c5d-86fb-7d2295c7d538 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.649384] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e8f79d-b95c-40a3-a812-d0dba50209e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.663029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f116c16a-4ece-498e-bbd7-1eff757290d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.682508] env[69982]: DEBUG nova.compute.provider_tree [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.692267] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Received event network-vif-plugged-43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.692520] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Acquiring lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.692790] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.692994] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.693267] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] No waiting events found dispatching network-vif-plugged-43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 785.693434] env[69982]: WARNING nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Received unexpected event 
network-vif-plugged-43f402d8-57b0-4bf5-a153-b4dc7f97d98d for instance with vm_state building and task_state spawning. [ 785.693567] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.693775] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing instance network info cache due to event network-changed-29f8b8ab-a86a-44eb-86e6-495ca7006221. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.693948] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Acquiring lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.694148] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Acquired lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.697191] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Refreshing network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 785.771742] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864448, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.788093] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5253b9cb-4887-f6f4-287a-302737eef39d, 'name': SearchDatastore_Task, 'duration_secs': 0.017507} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.789326] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-313f65df-4ea7-4c81-8071-5c82dc503e1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.797748] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 785.797748] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4b868-39a6-110f-2cc7-7c299b41d4b2" [ 785.797748] env[69982]: _type = "Task" [ 785.797748] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.811959] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4b868-39a6-110f-2cc7-7c299b41d4b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.813249] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Successfully created port: 0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.897221] env[69982]: DEBUG nova.compute.manager [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 785.897221] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.897221] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a57e0f4c-a9ff-4754-8e49-14cc7347f44e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.906096] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.906096] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-734764ab-90d7-405b-8dad-371468b0e07e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.914975] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 785.914975] env[69982]: value = "task-3864449" [ 785.914975] env[69982]: _type = "Task" [ 785.914975] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.925942] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864449, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.988849] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updated VIF entry in instance network info cache for port add5b64d-ceb9-4750-8e48-49d509a81e5f. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 785.989324] env[69982]: DEBUG nova.network.neutron [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updating instance_info_cache with network_info: [{"id": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "address": "fa:16:3e:5c:6b:a1", "network": {"id": "ef4d362f-fee5-4ed0-a85b-2cdf67ac5652", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-904295664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47175f0d16ed47f1bce853c13287bdc9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadd5b64d-ce", "ovs_interfaceid": "add5b64d-ceb9-4750-8e48-49d509a81e5f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.018685] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864447, 'name': ReconfigVM_Task, 'duration_secs': 0.670137} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.018967] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 786.020040] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-409a5a0d-7bfd-4cb9-9b0c-600e89510e14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.029860] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 786.029860] env[69982]: value = "task-3864450" [ 786.029860] env[69982]: _type = "Task" [ 786.029860] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.040732] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864450, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.082251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.082251] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance network_info: |[{"id": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "address": "fa:16:3e:8e:50:9e", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43f402d8-57", "ovs_interfaceid": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.082612] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:50:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43f402d8-57b0-4bf5-a153-b4dc7f97d98d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.090409] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.091117] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.091405] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-226af65e-7c75-44be-9f42-cc41d6ea094b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.116415] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.116415] env[69982]: value = "task-3864451" [ 786.116415] env[69982]: _type = "Task" [ 786.116415] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.126624] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864451, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.188226] env[69982]: DEBUG nova.scheduler.client.report [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.275025] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864448, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.309015] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4b868-39a6-110f-2cc7-7c299b41d4b2, 'name': SearchDatastore_Task, 'duration_secs': 0.058721} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.309317] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.309577] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] b5ad55cc-9010-46be-bfd0-28fa1607f1c3/b5ad55cc-9010-46be-bfd0-28fa1607f1c3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 786.309852] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89743ca0-0c6a-4e00-99f0-c0e1324f9f6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.318437] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 786.318437] env[69982]: value = "task-3864452" [ 786.318437] env[69982]: _type = "Task" [ 786.318437] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.326481] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.397610] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.426909] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864449, 'name': PowerOffVM_Task, 'duration_secs': 0.286754} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.427191] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 786.427357] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 786.427610] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3b40af4-c0e3-494a-a4f0-ae176d9767be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.431577] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 786.431813] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.431968] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.432162] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.432307] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.432451] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 786.432661] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 786.432818] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 786.432982] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 786.433156] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 786.433330] env[69982]: DEBUG nova.virt.hardware [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 786.434419] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51206919-1955-4a24-8a4c-c2182b9aa754 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.442835] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2b2b43-9194-4ef6-acbe-9739d1f7ccde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.493773] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd59ef7f-a6ba-4150-af0e-502ba9e59e7d req-444e87aa-99cd-4778-bcc2-5ab240a5b93d service nova] Releasing lock "refresh_cache-7af5a14d-f586-4746-9831-8be255581637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.515425] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 786.515787] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 786.516073] env[69982]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleting the datastore file [datastore2] 48162423-a117-437e-b171-9a40c7c6f49b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.519556] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb5bf1f9-6e2a-4c54-9a51-115f500bec42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.531344] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 786.531344] env[69982]: value = "task-3864454" [ 786.531344] env[69982]: _type = "Task" [ 786.531344] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.545813] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864450, 'name': Rename_Task, 'duration_secs': 0.213168} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.549263] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 786.549666] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.549893] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca7a8492-63c5-4abb-99cb-edd00536b786 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.556914] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 786.556914] env[69982]: value = "task-3864455" [ 786.556914] env[69982]: _type = "Task" [ 786.556914] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.565933] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.627356] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864451, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.698492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.326s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.705339] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.680s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.711070] env[69982]: INFO nova.compute.claims [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.745190] env[69982]: INFO nova.scheduler.client.report [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance 3e109fff-94bd-41a9-bc43-373143b7fda5 [ 786.773165] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864448, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.829514] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864452, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.986735] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updated VIF entry in instance network info cache for port 29f8b8ab-a86a-44eb-86e6-495ca7006221. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 786.987184] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [{"id": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "address": "fa:16:3e:33:eb:16", "network": {"id": "9d3634c6-ef88-4afd-bda3-d10c182dbde3", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1019415306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "344ff7edee7c427bbbdb29e1a8c91a46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6815237d-f565-474d-a3c0-9c675478eb00", "external-id": "nsx-vlan-transportzone-526", "segmentation_id": 526, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29f8b8ab-a8", "ovs_interfaceid": "29f8b8ab-a86a-44eb-86e6-495ca7006221", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.047445] env[69982]: DEBUG oslo_vmware.api [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400234} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.051026] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.051026] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 787.051026] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.051026] env[69982]: INFO nova.compute.manager [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 787.051026] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.051393] env[69982]: DEBUG nova.compute.manager [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 787.051393] env[69982]: DEBUG nova.network.neutron [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 787.068657] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864455, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.128143] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864451, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.258981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9e8d4fb9-6a98-4f1a-9e9d-f58a084555f0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "3e109fff-94bd-41a9-bc43-373143b7fda5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 36.284s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.274384] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864448, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.670546} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.274384] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.274572] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.275633] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44dc9378-fb7c-470e-923d-34411ea14494 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.285262] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 787.285262] env[69982]: value = "task-3864456" [ 787.285262] env[69982]: _type = "Task" [ 787.285262] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.297136] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864456, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.333589] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864452, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.492939] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Releasing lock "refresh_cache-48162423-a117-437e-b171-9a40c7c6f49b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.493400] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Received event network-changed-43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.493400] env[69982]: DEBUG nova.compute.manager [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Refreshing instance network info cache due to event network-changed-43f402d8-57b0-4bf5-a153-b4dc7f97d98d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.493975] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Acquiring lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.494621] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Acquired lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.494621] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Refreshing network info cache for port 43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 787.572152] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864455, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.632180] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864451, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.717144] env[69982]: DEBUG nova.compute.manager [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Received event network-vif-plugged-0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.717144] env[69982]: DEBUG oslo_concurrency.lockutils [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] Acquiring lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.718034] env[69982]: DEBUG oslo_concurrency.lockutils [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.718388] env[69982]: DEBUG oslo_concurrency.lockutils [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.718860] env[69982]: DEBUG nova.compute.manager [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] No waiting events found dispatching network-vif-plugged-0e9d6238-fb82-48aa-8702-091435aae1b1 
{{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 787.719370] env[69982]: WARNING nova.compute.manager [req-daa21ee6-2c8e-41df-879d-265a0ff2bfdf req-a31b0d73-9cb6-4ad0-8d8d-173562a02383 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Received unexpected event network-vif-plugged-0e9d6238-fb82-48aa-8702-091435aae1b1 for instance with vm_state building and task_state spawning. [ 787.728169] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Successfully updated port: 0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.795540] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864456, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092015} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.798234] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 787.799393] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa235a58-de5d-4f53-b576-c6f0e7cbafe8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.824544] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 787.828346] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-985bd689-48e4-4962-b2e5-136ff99909ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.857277] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864452, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.195229} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.858656] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] b5ad55cc-9010-46be-bfd0-28fa1607f1c3/b5ad55cc-9010-46be-bfd0-28fa1607f1c3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 787.858874] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 787.859208] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 787.859208] env[69982]: value = "task-3864457" [ 787.859208] env[69982]: _type = "Task" [ 787.859208] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.859425] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfef230f-585a-4413-a564-6d82ec74c0b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.883439] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864457, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.885182] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 787.885182] env[69982]: value = "task-3864458" [ 787.885182] env[69982]: _type = "Task" [ 787.885182] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.900078] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864458, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.971159] env[69982]: DEBUG nova.compute.manager [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Received event network-changed-c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.974588] env[69982]: DEBUG nova.compute.manager [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Refreshing instance network info cache due to event network-changed-c816f035-7c9d-47ba-8b3b-29a57ec10561. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.974588] env[69982]: DEBUG oslo_concurrency.lockutils [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.974588] env[69982]: DEBUG oslo_concurrency.lockutils [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.974871] env[69982]: DEBUG nova.network.neutron [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Refreshing network info cache for port c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 788.069429] env[69982]: DEBUG oslo_vmware.api [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864455, 'name': PowerOnVM_Task, 'duration_secs': 1.468967} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.069731] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 788.069952] env[69982]: INFO nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Took 11.35 seconds to spawn the instance on the hypervisor. [ 788.070230] env[69982]: DEBUG nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 788.071075] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f36aabcd-223f-42da-9ed9-080d333be24b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.135851] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864451, 'name': CreateVM_Task, 'duration_secs': 1.535822} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.136051] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 788.136774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.137322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.137409] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 788.137764] env[69982]: DEBUG nova.network.neutron [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.139255] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f921af8-ef41-4e1c-88a7-843077066569 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.149609] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 788.149609] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a04b53-7c6e-6b94-1fb8-04ad98e3019c" [ 788.149609] env[69982]: _type = "Task" [ 788.149609] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.163087] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a04b53-7c6e-6b94-1fb8-04ad98e3019c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.229174] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.230113] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.230348] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 788.335039] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Updated VIF entry in instance network info cache for port 43f402d8-57b0-4bf5-a153-b4dc7f97d98d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 788.335433] env[69982]: DEBUG nova.network.neutron [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Updating instance_info_cache with network_info: [{"id": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "address": "fa:16:3e:8e:50:9e", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap43f402d8-57", "ovs_interfaceid": "43f402d8-57b0-4bf5-a153-b4dc7f97d98d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.378873] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864457, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.401065] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864458, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087692} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.402289] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.402492] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d2fdf4-349a-40eb-82ec-b460c75cd89c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.432597] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] b5ad55cc-9010-46be-bfd0-28fa1607f1c3/b5ad55cc-9010-46be-bfd0-28fa1607f1c3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.434572] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c810e71a-af37-4392-ad82-fb6400c919f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.450173] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d511bc-bac6-4c3d-ae35-930aa1eb5ddc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.459523] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30c23d2c-1efa-4f3b-b186-0c376e63eb52 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.464382] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 788.464382] env[69982]: value = "task-3864459" [ 788.464382] env[69982]: _type = "Task" [ 788.464382] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.499968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416504f7-9c0a-4ec7-92b5-b23ba4761dac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.506767] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864459, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.512732] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d8ec41-678a-41b6-ba2b-96f03bcd1402 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.528957] env[69982]: DEBUG nova.compute.provider_tree [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.606288] env[69982]: INFO nova.compute.manager [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Took 44.64 seconds to build instance. [ 788.645493] env[69982]: INFO nova.compute.manager [-] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Took 1.60 seconds to deallocate network for instance. [ 788.665967] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a04b53-7c6e-6b94-1fb8-04ad98e3019c, 'name': SearchDatastore_Task, 'duration_secs': 0.026434} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.666713] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.666860] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.667355] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.667652] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.667967] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 
tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.668288] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7074e656-7456-4731-8987-66bb54a835c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.682699] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.682894] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.683692] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8bf7dd6-c6e0-4857-a9d9-250f973cd355 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.691905] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 788.691905] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a83b4b-701f-4ca5-e466-6965b6f713ef" [ 788.691905] env[69982]: _type = "Task" [ 788.691905] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.704268] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a83b4b-701f-4ca5-e466-6965b6f713ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.797236] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 788.839726] env[69982]: DEBUG oslo_concurrency.lockutils [req-45047fd2-3e25-4766-b54c-c6f387d74ece req-a4f0a121-2cfb-4aa2-a0f4-75be6b4ba4e5 service nova] Releasing lock "refresh_cache-d5c23433-a0f3-4f0a-9c62-051d07dcd712" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.868587] env[69982]: DEBUG nova.network.neutron [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updated VIF entry in instance network info cache for port c816f035-7c9d-47ba-8b3b-29a57ec10561. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 788.869119] env[69982]: DEBUG nova.network.neutron [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.883243] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864457, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.986279] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864459, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.032766] env[69982]: DEBUG nova.scheduler.client.report [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.084298] env[69982]: DEBUG nova.network.neutron [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Updating instance_info_cache with network_info: [{"id": "0e9d6238-fb82-48aa-8702-091435aae1b1", "address": "fa:16:3e:39:e1:01", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e9d6238-fb", "ovs_interfaceid": "0e9d6238-fb82-48aa-8702-091435aae1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.112126] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e657f0cb-1ccd-4695-8e2d-52a60338083d tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 57.147s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.158593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.207135] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a83b4b-701f-4ca5-e466-6965b6f713ef, 'name': SearchDatastore_Task, 'duration_secs': 0.016535} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.207135] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d13d948e-3a4f-4132-82f6-8ce121637cbe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.217968] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 789.217968] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5299f348-e470-e21d-0038-a846729b9183" [ 789.217968] env[69982]: _type = "Task" [ 789.217968] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.231273] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5299f348-e470-e21d-0038-a846729b9183, 'name': SearchDatastore_Task, 'duration_secs': 0.011698} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.231703] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.232067] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.232388] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95bb75c8-d4e1-44e3-8dcf-e728e7b032ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.240557] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 789.240557] env[69982]: value = "task-3864460" [ 789.240557] env[69982]: _type = "Task" [ 789.240557] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.250617] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.377469] env[69982]: DEBUG oslo_concurrency.lockutils [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.377795] env[69982]: DEBUG nova.compute.manager [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Received event network-vif-deleted-29f8b8ab-a86a-44eb-86e6-495ca7006221 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 789.377999] env[69982]: INFO nova.compute.manager [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Neutron deleted interface 29f8b8ab-a86a-44eb-86e6-495ca7006221; detaching it from the instance and deleting it from the info cache [ 789.378637] env[69982]: DEBUG nova.network.neutron [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.388288] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864457, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.477465] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.539357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.834s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.539934] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 789.542797] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.559s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.543050] env[69982]: DEBUG nova.objects.instance [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lazy-loading 'resources' on Instance uuid bd242bac-cd36-4fff-9325-fc14d5ceb566 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 789.587865] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.588274] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance network_info: |[{"id": "0e9d6238-fb82-48aa-8702-091435aae1b1", "address": "fa:16:3e:39:e1:01", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e9d6238-fb", "ovs_interfaceid": "0e9d6238-fb82-48aa-8702-091435aae1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 789.589157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:e1:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e9d6238-fb82-48aa-8702-091435aae1b1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.598074] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 
tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating folder: Project (b5ba58648e534c55953c89a4eae7caf4). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.599228] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26430146-8b6b-4134-b301-b7932f4daee2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.612591] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created folder: Project (b5ba58648e534c55953c89a4eae7caf4) in parent group-v767796. [ 789.612701] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating folder: Instances. Parent ref: group-v767919. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.613426] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-364cbd8a-7183-499d-928f-1f1ba0acceb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.615798] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 789.629420] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created folder: Instances in parent group-v767919. [ 789.631950] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.631950] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.631950] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbb6ebf4-8200-4d7e-9c29-71959cfe62e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.660835] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.660835] env[69982]: value = "task-3864463" [ 789.660835] env[69982]: _type = "Task" [ 789.660835] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.674733] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864463, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.756316] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864460, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.774199] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.774565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.883950] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864457, 'name': ReconfigVM_Task, 'duration_secs': 1.651103} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.884101] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7227e805-3f34-4a40-b8a3-f18cee0750b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.886249] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Reconfigured VM instance instance-00000024 to attach disk [datastore1] d73153ad-9258-4c3c-9699-b6364408d631/d73153ad-9258-4c3c-9699-b6364408d631.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.886829] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-866986ce-788a-4d04-9900-dc36c8c92703 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.899440] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5fbc39-4a88-47b2-b332-b168d8506678 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.910889] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 789.910889] env[69982]: value = "task-3864464" [ 789.910889] env[69982]: _type = "Task" [ 789.910889] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.921906] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864464, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.937857] env[69982]: DEBUG nova.compute.manager [req-65e6a44e-cfb6-47a7-8b94-aa65d6489dce req-43bbb962-8184-44c2-a084-5f75483726a7 service nova] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Detach interface failed, port_id=29f8b8ab-a86a-44eb-86e6-495ca7006221, reason: Instance 48162423-a117-437e-b171-9a40c7c6f49b could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 789.978242] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864459, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.053482] env[69982]: DEBUG nova.compute.utils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.054895] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 790.055071] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 790.103857] env[69982]: DEBUG nova.compute.manager [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Received event network-changed-0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.103857] env[69982]: DEBUG nova.compute.manager [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Refreshing instance network info cache due to event network-changed-0e9d6238-fb82-48aa-8702-091435aae1b1. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 790.103857] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] Acquiring lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.103857] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] Acquired lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.104371] env[69982]: DEBUG nova.network.neutron [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Refreshing network info cache for port 0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 790.127432] env[69982]: DEBUG nova.policy [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '114e89c3714d4ce39f4947a0aa567aba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5ba58648e534c55953c89a4eae7caf4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 790.145719] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.174244] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864463, 'name': CreateVM_Task, 'duration_secs': 0.389564} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.177280] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.178838] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.179345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.183612] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 790.183612] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35324457-6460-47e8-b3d0-f1776d27c343 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.191046] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 790.191046] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cd033a-c19a-d186-f2bb-746f61d0187b" [ 790.191046] env[69982]: _type = "Task" [ 790.191046] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.208238] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cd033a-c19a-d186-f2bb-746f61d0187b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.253137] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864460, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545464} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.258098] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.258442] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.259264] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d3b74f5-3631-4090-a4e0-dc0b53740c61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.270030] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 790.270030] env[69982]: value = "task-3864465" [ 790.270030] env[69982]: _type = "Task" [ 790.270030] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.278081] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864465, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.426460] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864464, 'name': Rename_Task, 'duration_secs': 0.201398} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.426846] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.427193] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2deddbf-27f4-4c60-91c4-95c0c2da0b04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.441614] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 790.441614] env[69982]: value = "task-3864466" [ 790.441614] env[69982]: _type = "Task" [ 790.441614] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.464700] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.482822] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864459, 'name': ReconfigVM_Task, 'duration_secs': 1.541008} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.484829] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Reconfigured VM instance instance-00000028 to attach disk [datastore1] b5ad55cc-9010-46be-bfd0-28fa1607f1c3/b5ad55cc-9010-46be-bfd0-28fa1607f1c3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.490036] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e9d3719-6301-429c-bee0-7517d57bcd04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.497292] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 790.497292] env[69982]: value = "task-3864467" [ 790.497292] env[69982]: _type = "Task" [ 790.497292] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.512821] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864467, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.556760] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Successfully created port: 3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.559202] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 790.704034] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cd033a-c19a-d186-f2bb-746f61d0187b, 'name': SearchDatastore_Task, 'duration_secs': 0.013495} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.704503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.705088] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.705088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.705088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.705372] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.705704] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96cb4064-f621-4cfa-87a2-65e863fc4ca7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.717967] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.718377] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.720789] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e6fcb9-be5a-4803-b7c7-ab5cd3750fa1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.732053] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 790.732053] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5293ad72-b65e-2a7b-cfbc-c237ea343ed9" [ 790.732053] env[69982]: _type = "Task" [ 790.732053] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.751339] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.751626] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.751901] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5293ad72-b65e-2a7b-cfbc-c237ea343ed9, 'name': SearchDatastore_Task, 'duration_secs': 0.012521} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.752746] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0819250-1727-4af1-b779-f9f590af4d2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.761018] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 790.761018] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ebf52a-5452-08ac-5ec4-09c22a28273f" [ 790.761018] env[69982]: _type = "Task" [ 790.761018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.780101] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ebf52a-5452-08ac-5ec4-09c22a28273f, 'name': SearchDatastore_Task, 'duration_secs': 0.012164} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.780101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.780101] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 790.780101] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f84d4ab6-c85d-4e26-a1be-acb381008a6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.785104] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864465, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.216098} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.787163] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.794064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fb1591-4adc-4dbf-988d-8d272d0f204a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.798948] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 790.798948] env[69982]: value = "task-3864468" [ 790.798948] env[69982]: _type = "Task" [ 790.798948] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.815611] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba01959-f399-48fb-96e3-f761f3564e75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.827222] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.831664] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1aa66d4-e0a2-476b-a56c-7ef695a8d226 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.851649] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864468, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.855846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c06d2bf-11dc-4978-ac81-b710081cea22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.860947] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 790.860947] env[69982]: value = "task-3864469" [ 790.860947] env[69982]: _type = "Task" [ 790.860947] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.891705] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b1705d-64ad-48f2-941d-eebb71e15836 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.899088] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864469, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.908345] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775ac14a-d5bd-4b56-ae74-a317fa77fdcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.924326] env[69982]: DEBUG nova.compute.provider_tree [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 790.955752] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.009665] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864467, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.054018] env[69982]: DEBUG nova.network.neutron [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Updated VIF entry in instance network info cache for port 0e9d6238-fb82-48aa-8702-091435aae1b1. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 791.054844] env[69982]: DEBUG nova.network.neutron [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Updating instance_info_cache with network_info: [{"id": "0e9d6238-fb82-48aa-8702-091435aae1b1", "address": "fa:16:3e:39:e1:01", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0e9d6238-fb", "ovs_interfaceid": "0e9d6238-fb82-48aa-8702-091435aae1b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.310008] env[69982]: DEBUG nova.compute.manager [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 
tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 791.320326] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864468, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.374310] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864469, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.429561] env[69982]: DEBUG nova.scheduler.client.report [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.455720] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.512357] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864467, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.559370] env[69982]: DEBUG oslo_concurrency.lockutils [req-c4ca029a-5bbe-45ac-9f64-2c7044d91d95 req-1f042838-9440-4326-9144-43b2aa884252 service nova] Releasing lock "refresh_cache-9123b08c-d2ec-4c4d-bade-0acdae75640a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.578592] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 791.603562] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 791.604116] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.604432] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 791.604728] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.604972] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 791.605266] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 791.605594] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 791.605903] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 791.606226] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b 
tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 791.606504] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 791.606814] env[69982]: DEBUG nova.virt.hardware [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 791.608038] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b405dd-927f-4b04-ba1c-3b0c960ecc47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.622023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c5af09-67e3-4aad-b3fa-3e985a2c0765 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.811067] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864468, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555867} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.811576] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 791.812153] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 791.812571] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07893153-bd48-4841-a104-39efeb399c73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.824850] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 791.824850] env[69982]: value = "task-3864470" [ 791.824850] env[69982]: _type = "Task" [ 791.824850] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.836500] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864470, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.843836] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.873919] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864469, 'name': ReconfigVM_Task, 'duration_secs': 0.709312} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.874523] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Reconfigured VM instance instance-00000029 to attach disk [datastore1] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.875387] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90afa54e-0c1f-4ad3-be89-67ad6ffc767c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.885513] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 791.885513] env[69982]: value = "task-3864471" [ 791.885513] env[69982]: _type = "Task" [ 791.885513] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.896718] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864471, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.933792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.391s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 791.938393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.185s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.940099] env[69982]: INFO nova.compute.claims [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.957453] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.967328] env[69982]: INFO nova.scheduler.client.report [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Deleted allocations for instance bd242bac-cd36-4fff-9325-fc14d5ceb566 [ 792.020217] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864467, 'name': Rename_Task, 'duration_secs': 1.292726} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.020946] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.020946] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6de3408e-250e-4b51-835a-b1a226a525a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.030214] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 792.030214] env[69982]: value = "task-3864472" [ 792.030214] env[69982]: _type = "Task" [ 792.030214] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.040850] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.121280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.121804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.294662] env[69982]: DEBUG nova.compute.manager [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Received event network-vif-plugged-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.294883] env[69982]: DEBUG oslo_concurrency.lockutils [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] Acquiring lock "d2684194-a688-4466-9852-1f4ff656f057-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.296498] env[69982]: DEBUG oslo_concurrency.lockutils [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] Lock "d2684194-a688-4466-9852-1f4ff656f057-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.299165] env[69982]: DEBUG oslo_concurrency.lockutils [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] Lock "d2684194-a688-4466-9852-1f4ff656f057-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.299165] env[69982]: DEBUG nova.compute.manager [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] No waiting events found dispatching network-vif-plugged-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.299165] env[69982]: WARNING nova.compute.manager [req-366bb3df-3822-4eea-8739-55def0422fc1 req-8d9e1641-3dad-487a-932e-69b136520f78 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] 
Received unexpected event network-vif-plugged-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 for instance with vm_state building and task_state spawning. [ 792.336049] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864470, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.238366} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.336431] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 792.336963] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72fc7af0-49a2-46cd-bdf5-e03b00404d32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.361207] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 792.361570] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8dc64491-875f-43d9-bbac-6c16ddb108ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.384146] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 792.384146] env[69982]: value = "task-3864473" [ 792.384146] env[69982]: _type = "Task" [ 792.384146] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.396945] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.403268] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864471, 'name': Rename_Task, 'duration_secs': 0.466811} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.404410] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Successfully updated port: 3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.405600] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.406074] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12023289-d553-4325-8315-9d84ac43f619 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.415300] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 792.415300] env[69982]: value = "task-3864474" [ 792.415300] env[69982]: _type = "Task" [ 792.415300] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.427462] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.461439] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.477570] env[69982]: DEBUG oslo_concurrency.lockutils [None req-384404d0-58af-4c6b-a482-ffdc578eeddb tempest-InstanceActionsNegativeTestJSON-342351221 tempest-InstanceActionsNegativeTestJSON-342351221-project-member] Lock "bd242bac-cd36-4fff-9325-fc14d5ceb566" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.016s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.488022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.488022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.543022] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864472, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.897470] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864473, 'name': ReconfigVM_Task, 'duration_secs': 0.337015} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.897896] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 792.898857] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7a0f7a00-4e6d-49cb-8d6e-7f3512c96edd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.907077] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.907252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.907373] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 792.908792] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 792.908792] env[69982]: value = "task-3864475" [ 792.908792] env[69982]: _type = "Task" [ 792.908792] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.932101] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864474, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.971226] env[69982]: DEBUG oslo_vmware.api [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864466, 'name': PowerOnVM_Task, 'duration_secs': 2.12065} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.971487] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.972584] env[69982]: DEBUG nova.compute.manager [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.972671] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a47e4a4-cccb-44a0-8172-8f44a619ed93 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.048698] env[69982]: DEBUG oslo_vmware.api [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864472, 'name': PowerOnVM_Task, 'duration_secs': 0.876211} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.048698] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.048698] env[69982]: INFO nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Took 13.33 seconds to spawn the instance on the hypervisor. [ 793.048955] env[69982]: DEBUG nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.050032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dac66d-00de-4547-8cea-cbadfac6e60f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.430533] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864475, 'name': Rename_Task, 'duration_secs': 0.196492} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.431544] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 793.431859] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-445fadc7-23a1-4724-8bda-0da4c352ab16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.438703] env[69982]: DEBUG oslo_vmware.api [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864474, 'name': PowerOnVM_Task, 'duration_secs': 0.62259} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.439400] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 793.439617] env[69982]: INFO nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Took 10.89 seconds to spawn the instance on the hypervisor. [ 793.439798] env[69982]: DEBUG nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.440655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a463e5cf-8f52-4466-a391-7632f832aa84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.448261] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 793.448261] env[69982]: value = "task-3864476" [ 793.448261] env[69982]: _type = "Task" [ 793.448261] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.462716] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864476, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.494068] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.551848] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 793.578370] env[69982]: INFO nova.compute.manager [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Took 47.24 seconds to build instance. [ 793.595013] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26331d4-dfe0-484a-b80f-06b03becae83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.612259] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51d9c10-591f-4468-af5b-91861292a645 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.659294] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453dca4b-4b56-43d1-bb68-0e04421cee76 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.672943] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c45e68-4211-4bcd-9219-374b269fbc71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.689724] env[69982]: DEBUG nova.compute.provider_tree [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.819984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "d73153ad-9258-4c3c-9699-b6364408d631" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.820324] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.820551] 
env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "d73153ad-9258-4c3c-9699-b6364408d631-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.820737] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.820977] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.829018] env[69982]: INFO nova.compute.manager [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Terminating instance [ 793.877461] env[69982]: DEBUG nova.network.neutron [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Updating instance_info_cache with network_info: [{"id": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "address": "fa:16:3e:37:2a:71", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b50b2f7-54", "ovs_interfaceid": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.965834] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864476, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.970680] env[69982]: INFO nova.compute.manager [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Took 47.57 seconds to build instance. [ 794.082118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c34ac939-3969-4f9d-bf2d-633401261102 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.944s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.193074] env[69982]: DEBUG nova.scheduler.client.report [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.330758] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "refresh_cache-d73153ad-9258-4c3c-9699-b6364408d631" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.330955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquired lock "refresh_cache-d73153ad-9258-4c3c-9699-b6364408d631" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.331451] env[69982]: DEBUG nova.network.neutron [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 794.381696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.382102] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Instance network_info: |[{"id": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "address": "fa:16:3e:37:2a:71", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": 
"tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b50b2f7-54", "ovs_interfaceid": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.386022] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:2a:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.391548] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.391831] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.392013] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e37c3888-a813-404b-9741-0ddd2832b1e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.415312] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.415312] env[69982]: value = "task-3864477" [ 794.415312] env[69982]: _type = "Task" [ 794.415312] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.425405] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864477, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.461746] env[69982]: DEBUG oslo_vmware.api [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864476, 'name': PowerOnVM_Task, 'duration_secs': 0.59689} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.463129] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.463338] env[69982]: INFO nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Took 8.07 seconds to spawn the instance on the hypervisor. [ 794.463517] env[69982]: DEBUG nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.468274] env[69982]: DEBUG nova.compute.manager [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Received event network-changed-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.468274] env[69982]: DEBUG nova.compute.manager [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Refreshing instance network info cache due to event network-changed-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.468274] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] Acquiring lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.468274] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] Acquired lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.468274] env[69982]: DEBUG nova.network.neutron [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Refreshing network info cache for port 3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 794.468423] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6abbefc-40a5-4a9c-8e11-841a1254bf3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.472146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-daa110c9-2a08-4329-ad8b-144906b0b150 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.094s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.586858] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 794.678008] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.678826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.679151] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.679377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.679652] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.682681] env[69982]: INFO nova.compute.manager [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Terminating instance [ 794.699898] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.764s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.700517] env[69982]: DEBUG 
nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 794.703065] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.616s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.703273] env[69982]: DEBUG nova.objects.instance [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 794.868178] env[69982]: DEBUG nova.network.neutron [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 794.927172] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864477, 'name': CreateVM_Task, 'duration_secs': 0.36778} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.927379] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 794.928277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.928475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.928817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 794.929116] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ad4226d-e6ba-4fd4-a7c4-388895ec5ac3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.936664] env[69982]: DEBUG oslo_vmware.api [None 
req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 794.936664] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52843444-5c27-e76e-5608-2ce8ac562f27" [ 794.936664] env[69982]: _type = "Task" [ 794.936664] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.947308] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52843444-5c27-e76e-5608-2ce8ac562f27, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.960426] env[69982]: DEBUG nova.network.neutron [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.981709] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 794.996707] env[69982]: INFO nova.compute.manager [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Took 38.82 seconds to build instance. [ 795.112704] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.186615] env[69982]: DEBUG nova.compute.manager [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 795.186863] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.187790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9861936-1358-4199-925b-4f659f9a5608 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.197593] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.198049] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2452c61-02ae-4dc3-9b0e-253558d0a9b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.205504] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 795.205504] env[69982]: value = "task-3864478" [ 795.205504] env[69982]: _type = "Task" [ 795.205504] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.216041] env[69982]: DEBUG nova.compute.utils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.220487] env[69982]: DEBUG nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 795.228975] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.304446] env[69982]: DEBUG nova.network.neutron [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Updated VIF entry in instance network info cache for port 3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 795.304844] env[69982]: DEBUG nova.network.neutron [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Updating instance_info_cache with network_info: [{"id": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "address": "fa:16:3e:37:2a:71", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b50b2f7-54", "ovs_interfaceid": "3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.456632] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52843444-5c27-e76e-5608-2ce8ac562f27, 'name': SearchDatastore_Task, 'duration_secs': 0.011052} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.457758] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.457758] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.458046] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.458256] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.458572] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.458985] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3449a4f5-e0be-4fc0-b1ef-e4fd33936b37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.463074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Releasing lock "refresh_cache-d73153ad-9258-4c3c-9699-b6364408d631" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.463673] env[69982]: DEBUG nova.compute.manager [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 795.463972] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.466249] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cec8b6e-cf79-43ef-9f37-35e52981363b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.477944] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.478460] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00e1f525-26f6-4fb0-a91b-edff3b718731 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.484916] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.485226] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.486531] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0d476f5-4559-4533-9b8b-2e1930b5e2fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.494840] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 795.494840] env[69982]: value = "task-3864479" [ 795.494840] env[69982]: _type = "Task" [ 795.494840] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.499337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-46cf50ca-076f-4bcf-9712-a6746696d1e9 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.773s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.509858] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 795.509858] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f566b9-05ba-a36e-f7ad-8f36797c764e" [ 795.509858] env[69982]: _type = "Task" [ 795.509858] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.517178] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.520683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.524769] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f566b9-05ba-a36e-f7ad-8f36797c764e, 'name': SearchDatastore_Task, 'duration_secs': 0.012942} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.525695] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c7954ed-af86-4a2d-bbdf-f54bbe72e16f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.532856] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 795.532856] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e5028c-9cfa-7eb0-0488-b8a30feadd7e" [ 795.532856] env[69982]: _type = "Task" [ 795.532856] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.542870] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e5028c-9cfa-7eb0-0488-b8a30feadd7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.588389] env[69982]: INFO nova.compute.manager [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Rebuilding instance [ 795.645827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "5743a020-0c09-45ec-aca4-5ce367cc201a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.646604] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.647661] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.647860] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.649277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.654036] env[69982]: INFO nova.compute.manager [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Terminating instance [ 795.670276] env[69982]: DEBUG nova.compute.manager [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 795.672013] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79896d38-170f-496a-a956-d0ba7eaed10c {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.718690] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864478, 'name': PowerOffVM_Task, 'duration_secs': 0.28153} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.720716] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 795.720716] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 795.720716] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2f9f5f4-df4a-4f34-8e32-a5c61f90fdfb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.722753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2431eaec-b8bb-4b66-aa94-71c72b3b496b tempest-ServersAdmin275Test-970373879 tempest-ServersAdmin275Test-970373879-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.724036] env[69982]: DEBUG nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 795.729072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.426s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.729325] env[69982]: DEBUG nova.objects.instance [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lazy-loading 'resources' on Instance uuid 67f59d53-c61b-48ad-b55d-710595e9dae3 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.801501] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.801768] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.801917] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Deleting the datastore file [datastore2] ad0c405f-48c8-4726-8e95-eb83a6e158fe {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.802231] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ff3145f-3cd5-4e93-9611-9ba5a08f2aa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.808224] env[69982]: DEBUG oslo_concurrency.lockutils [req-0e49d84e-9627-417b-bbe9-e5042f5fd72e req-52874363-63b2-4b15-be99-b51e4980a8c6 service nova] Releasing lock "refresh_cache-d2684194-a688-4466-9852-1f4ff656f057" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.812198] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for the task: (returnval){ [ 795.812198] env[69982]: value = "task-3864481" [ 795.812198] env[69982]: _type = "Task" [ 795.812198] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.825316] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864481, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.009877] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864479, 'name': PowerOffVM_Task, 'duration_secs': 0.246512} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.010236] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.010410] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.010795] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 796.013373] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79282618-a125-469c-a093-137bfedf8300 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.048581] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e5028c-9cfa-7eb0-0488-b8a30feadd7e, 'name': SearchDatastore_Task, 'duration_secs': 0.012267} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.048845] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.049063] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.049215] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Deleting the datastore file [datastore1] d73153ad-9258-4c3c-9699-b6364408d631 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.049531] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.049743] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d2684194-a688-4466-9852-1f4ff656f057/d2684194-a688-4466-9852-1f4ff656f057.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.049973] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0afa2dd-5de8-4540-a894-da86a6bab00b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.051970] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2f6cb32-abc7-44a8-98f2-692b24768e18 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.061882] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 796.061882] env[69982]: value = "task-3864483" [ 796.061882] env[69982]: _type = "Task" [ 796.061882] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.062658] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for the task: (returnval){ [ 796.062658] env[69982]: value = "task-3864484" [ 796.062658] env[69982]: _type = "Task" [ 796.062658] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.076935] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.081404] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864484, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.160072] env[69982]: DEBUG nova.compute.manager [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.160451] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.161547] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071f79aa-a2f9-4d61-a61a-306f8454d4f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.171466] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.171812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be8122ca-fd56-45c5-81f7-7330ea8a061c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.180229] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the task: (returnval){ [ 796.180229] env[69982]: value = "task-3864485" [ 796.180229] env[69982]: _type = "Task" [ 796.180229] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.194579] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864485, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.330429] env[69982]: DEBUG oslo_vmware.api [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Task: {'id': task-3864481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185233} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.330429] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.330666] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 796.330874] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.331160] env[69982]: INFO nova.compute.manager [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Took 1.14 seconds to destroy the instance on the hypervisor. [ 796.332441] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.332441] env[69982]: DEBUG nova.compute.manager [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 796.332441] env[69982]: DEBUG nova.network.neutron [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 796.544421] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.577873] env[69982]: DEBUG oslo_vmware.api [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Task: {'id': task-3864484, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179024} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.584477] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.584826] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 796.585052] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.585258] env[69982]: INFO nova.compute.manager [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Took 1.12 seconds to destroy the instance on the hypervisor. [ 796.585595] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.585899] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864483, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.586539] env[69982]: DEBUG nova.compute.manager [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 796.586693] env[69982]: DEBUG nova.network.neutron [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 796.610359] env[69982]: DEBUG nova.network.neutron [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 796.699049] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.699393] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864485, 'name': PowerOffVM_Task, 'duration_secs': 0.32103} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.702336] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d0f0687-641d-49ad-a6f9-e39e49700c56 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.704398] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.704653] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.704975] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eed6989-66fc-4d70-8c20-58e16266a574 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.715522] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 796.715522] env[69982]: value = "task-3864487" [ 796.715522] env[69982]: _type = "Task" [ 796.715522] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.730706] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864487, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.746567] env[69982]: DEBUG nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 796.787084] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.787358] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.787560] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Deleting the datastore file [datastore1] 5743a020-0c09-45ec-aca4-5ce367cc201a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.793505] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 796.793784] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.793951] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 796.794220] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Flavor pref 0:0:0 {{(pid=69982) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.794445] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 796.794608] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 796.794819] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 796.794977] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 796.795163] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 796.795329] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 796.795658] env[69982]: DEBUG nova.virt.hardware [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 796.795885] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9b2c2f7-2a1b-49e3-b67a-e225b1e7a3bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.798548] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26350b83-3347-4c1e-a002-c397086f986c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.810234] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6933e146-d9a9-449e-932e-c799d627efa5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.814981] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for the 
task: (returnval){ [ 796.814981] env[69982]: value = "task-3864488" [ 796.814981] env[69982]: _type = "Task" [ 796.814981] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.831846] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.838282] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Creating folder: Project (752a1a2548614dd99e7745f15bacc208). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.839895] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb9a40c3-036a-4b15-b8bb-4080077885e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.845323] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864488, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.855982] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Created folder: Project (752a1a2548614dd99e7745f15bacc208) in parent group-v767796. [ 796.856250] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Creating folder: Instances. Parent ref: group-v767923. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 796.856543] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-15fac38c-971b-45f4-9646-ccc58b385cac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.869301] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Created folder: Instances in parent group-v767923. [ 796.869597] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.876388] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.876388] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-607f8541-773c-4dc0-b0e7-1ee54c536dd8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.902395] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.902395] env[69982]: value = "task-3864491" [ 796.902395] env[69982]: _type = "Task" [ 796.902395] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.915476] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864491, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.072907] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676588} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.073011] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d2684194-a688-4466-9852-1f4ff656f057/d2684194-a688-4466-9852-1f4ff656f057.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.073273] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.073563] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bda4c70-768b-4e85-9beb-7db356fc9b51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.078022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c579437-0c0a-4cb7-a929-12b0d12a4c9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.094512] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 797.094512] env[69982]: value = "task-3864492" [ 797.094512] env[69982]: _type = "Task" [ 797.094512] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.096141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bbd7657-5912-447b-9136-88e324ccc53e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.138539] env[69982]: DEBUG nova.network.neutron [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.142476] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864492, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.142547] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef705e64-2172-4f07-b833-f8dacce4012d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.154473] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01f2573-9a05-487b-8356-c39ca90f1243 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.173894] env[69982]: DEBUG nova.compute.provider_tree [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.227937] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864487, 'name': PowerOffVM_Task, 'duration_secs': 0.272089} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.228774] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 797.228774] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 797.229481] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e82070-84d4-4102-8c7b-b8ea89cf5f04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.237958] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 797.238216] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd3874ab-c48e-4c26-b7a6-515594326711 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.307534] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 797.307767] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 797.307968] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore1] d5c23433-a0f3-4f0a-9c62-051d07dcd712 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 797.308254] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e916c338-0b57-42da-9993-61b66b3c59e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.315716] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 797.315716] env[69982]: value = "task-3864494" [ 797.315716] env[69982]: _type = "Task" [ 797.315716] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.329064] env[69982]: DEBUG oslo_vmware.api [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Task: {'id': task-3864488, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244901} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.333383] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.333609] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.333865] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.333986] env[69982]: INFO nova.compute.manager [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 797.334274] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.334494] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.336117] env[69982]: DEBUG nova.compute.manager [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.336117] env[69982]: DEBUG nova.network.neutron [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 797.414189] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864491, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.517153] env[69982]: DEBUG nova.compute.manager [req-a4a2e71b-c315-4dea-92bb-a58407595e45 req-113a5079-09b9-4ddb-a1c3-9ab6802615ca service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Received event network-vif-deleted-0fd7d67c-3b77-4d0b-ad20-76668f828c54 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.517358] env[69982]: INFO nova.compute.manager [req-a4a2e71b-c315-4dea-92bb-a58407595e45 req-113a5079-09b9-4ddb-a1c3-9ab6802615ca service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Neutron deleted interface 0fd7d67c-3b77-4d0b-ad20-76668f828c54; detaching it from the instance and deleting it from the info cache [ 797.517530] env[69982]: DEBUG nova.network.neutron [req-a4a2e71b-c315-4dea-92bb-a58407595e45 req-113a5079-09b9-4ddb-a1c3-9ab6802615ca service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.608406] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864492, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081199} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.608719] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.609419] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40a9539-89eb-46d1-9b0d-431dc1746b19 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.634303] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] d2684194-a688-4466-9852-1f4ff656f057/d2684194-a688-4466-9852-1f4ff656f057.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.635245] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08d077cb-4271-4c82-8193-ee5ac3e75592 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.652322] env[69982]: DEBUG nova.network.neutron [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.654084] env[69982]: INFO nova.compute.manager [-] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Took 1.07 seconds to deallocate network for instance. 
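[editor's note] The entries around this point (ExtendVirtualDisk_Task, ReconfigVM_Task, the "Waiting for the task" / "_poll_task ... progress is N%" records, and the image-cache lock acquire/release pairs) all follow one pattern: take a named lock, invoke a vCenter task through the oslo.vmware session, then block until the task completes. The sketch below is illustrative only and is not Nova's actual implementation: the host, credentials, datacenter reference and VMDK path are placeholders, and the oslo.vmware constructor/argument names are written from memory of that library's public API and should be verified against the installed version.

# Minimal sketch of the lock/invoke/wait pattern these log entries reflect.
from oslo_concurrency import lockutils
from oslo_vmware import api


def extend_virtual_disk(host, user, password, dc_ref, vmdk_path, new_size_kb):
    # One API session per process; the log's "oslo_vmware_api_lock" entries
    # come from creating exactly one of these.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    # Serialize work on a shared datastore path, as the
    # "[datastore2] devstack-image-cache_base/..." lock entries do.
    with lockutils.lock(vmdk_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        # Invoking the method returns a task reference immediately.
        task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                  disk_mgr,
                                  name=vmdk_path,
                                  datacenter=dc_ref,
                                  newCapacityKb=new_size_kb,
                                  eagerZero=False)
        # wait_for_task polls the task (producing the "_poll_task ...
        # progress is N%" entries) and raises if vCenter reports an error.
        return session.wait_for_task(task)

The same shape underlies the PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CopyVirtualDisk_Task records elsewhere in this log: only the invoked method and its arguments change.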
[ 797.664459] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 797.664459] env[69982]: value = "task-3864495" [ 797.664459] env[69982]: _type = "Task" [ 797.664459] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.676222] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864495, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.677017] env[69982]: DEBUG nova.scheduler.client.report [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.750766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.751135] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.830131] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187649} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.830578] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.830872] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.831253] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.914522] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864491, 'name': CreateVM_Task, 'duration_secs': 0.604908} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.914965] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 797.915454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.915634] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.916548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 797.916548] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4a34363-149f-4e35-b32d-90b55e9da2d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.922862] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 797.922862] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8f903-e061-38cd-c33c-32807eee3c3a" [ 797.922862] env[69982]: _type = "Task" [ 797.922862] 
env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.933679] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8f903-e061-38cd-c33c-32807eee3c3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.022036] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bd31a8e-ca90-44dd-a510-c6ad09fe483d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.032376] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73865ed6-a278-4df5-ba3b-95efc66d7284 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.068806] env[69982]: DEBUG nova.compute.manager [req-a4a2e71b-c315-4dea-92bb-a58407595e45 req-113a5079-09b9-4ddb-a1c3-9ab6802615ca service nova] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Detach interface failed, port_id=0fd7d67c-3b77-4d0b-ad20-76668f828c54, reason: Instance ad0c405f-48c8-4726-8e95-eb83a6e158fe could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 798.157637] env[69982]: INFO nova.compute.manager [-] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Took 1.83 seconds to deallocate network for instance. [ 798.166780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.187033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.457s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.191354] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864495, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.191691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.309s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.191974] env[69982]: DEBUG nova.objects.instance [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lazy-loading 'resources' on Instance uuid 25957956-0d50-4b4f-8e5c-f55a1e182235 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.224283] env[69982]: INFO nova.scheduler.client.report [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Deleted allocations for instance 67f59d53-c61b-48ad-b55d-710595e9dae3 [ 798.436381] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8f903-e061-38cd-c33c-32807eee3c3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010742} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.436772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.436991] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.437418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.437598] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.437795] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Creating directory with path [datastore2] 
devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.438099] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-657228b5-16fb-4987-b6ee-7c04d22eb0e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.450039] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.450251] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.451297] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-966c9838-10cc-417f-9092-206c135504c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.457447] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 798.457447] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dab5ae-8028-2b6c-c2cc-7b90952a9d5e" [ 798.457447] env[69982]: _type = "Task" [ 798.457447] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.467773] env[69982]: DEBUG nova.network.neutron [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.470061] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dab5ae-8028-2b6c-c2cc-7b90952a9d5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.678378] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.678854] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864495, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.736072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b19d124-f3f6-49ec-83d2-70ea4df80b15 tempest-ServersTestBootFromVolume-927506322 tempest-ServersTestBootFromVolume-927506322-project-member] Lock "67f59d53-c61b-48ad-b55d-710595e9dae3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.808s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.870158] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 798.870459] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.870617] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 798.870797] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.870942] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 798.871105] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 798.871321] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 798.871478] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 798.871640] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 798.871802] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 798.871978] env[69982]: DEBUG nova.virt.hardware [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 798.872870] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1025d742-9079-4113-8120-e82d31a9d0d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.884670] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409f6a79-c819-41a8-b08f-9d3a46dd2cab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.902549] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:50:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '43f402d8-57b0-4bf5-a153-b4dc7f97d98d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.910173] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 798.913071] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 798.913658] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5d3c24d-6008-45b4-b46b-b34c41acf3d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.936170] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.936170] env[69982]: value = "task-3864496" [ 798.936170] env[69982]: _type = "Task" [ 798.936170] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.946384] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864496, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.971208] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dab5ae-8028-2b6c-c2cc-7b90952a9d5e, 'name': SearchDatastore_Task, 'duration_secs': 0.011605} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.971208] env[69982]: INFO nova.compute.manager [-] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Took 1.64 seconds to deallocate network for instance. [ 798.971208] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b71e989e-4f4b-440a-955a-45f90f3303bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.980381] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 798.980381] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524d4f9c-c8ce-4ef6-35bf-030775dcadc6" [ 798.980381] env[69982]: _type = "Task" [ 798.980381] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.992872] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524d4f9c-c8ce-4ef6-35bf-030775dcadc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.179796] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864495, 'name': ReconfigVM_Task, 'duration_secs': 1.06514} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.180153] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Reconfigured VM instance instance-0000002b to attach disk [datastore2] d2684194-a688-4466-9852-1f4ff656f057/d2684194-a688-4466-9852-1f4ff656f057.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 799.180834] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83cf8c34-6153-4a5f-83ba-d4065a9f5ba0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.187716] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 799.187716] env[69982]: value = "task-3864497" [ 799.187716] env[69982]: _type = "Task" [ 799.187716] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.197959] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864497, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.298832] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d80e7d0-fce5-4582-8fdf-04fb5fd16b53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.312799] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadf1174-34f9-4449-a01f-5ad59cc59315 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.351153] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a85fff8-00d5-4eed-9ddf-a44c82ab27d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.361047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28705554-631c-4277-aa35-02a562f6047a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.375846] env[69982]: DEBUG nova.compute.provider_tree [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.451453] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864496, 'name': CreateVM_Task, 'duration_secs': 0.367779} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.451649] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.452388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.452566] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.452896] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 799.453204] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d4087c-cfa8-4e95-81a1-e34c2f5209ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.462690] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 799.462690] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f421b6-2f46-47ac-c8e2-a2df28f7b2ef" [ 799.462690] env[69982]: _type = "Task" [ 799.462690] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.472442] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f421b6-2f46-47ac-c8e2-a2df28f7b2ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.488982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.495489] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524d4f9c-c8ce-4ef6-35bf-030775dcadc6, 'name': SearchDatastore_Task, 'duration_secs': 0.014264} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.496411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.496411] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.496616] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d189824-e9ae-4341-ac98-5a977e2e9da8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.504462] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 799.504462] env[69982]: value = "task-3864498" [ 799.504462] env[69982]: _type = "Task" [ 799.504462] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.514968] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864498, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.609280] env[69982]: DEBUG nova.compute.manager [req-5171da85-6100-4156-bc3c-6fba0c93a515 req-159c9fb3-88a3-41e7-b3cc-18c7811383b7 service nova] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Received event network-vif-deleted-ad716338-99c7-49f2-a530-97e342fab644 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 799.701399] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864497, 'name': Rename_Task, 'duration_secs': 0.251445} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.701759] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.702008] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4055ee9-041a-4f5e-87ee-c1fa4890d58c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.712840] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 799.712840] env[69982]: value = "task-3864499" [ 799.712840] env[69982]: _type = "Task" [ 799.712840] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.723625] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864499, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.880390] env[69982]: DEBUG nova.scheduler.client.report [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.977429] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f421b6-2f46-47ac-c8e2-a2df28f7b2ef, 'name': SearchDatastore_Task, 'duration_secs': 0.012114} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.980024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.980024] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 799.980024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.980024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.980418] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.980418] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1cdd6891-d03d-4ea0-96bf-12b49771cc2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.997281] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.997476] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 799.998356] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9fbe000-08b6-4c7a-b6f5-fb21460f83ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.010037] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 800.010037] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ff5162-d231-9eb0-569a-cea14bc9da79" [ 800.010037] env[69982]: _type = "Task" [ 800.010037] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.017418] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864498, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.023823] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ff5162-d231-9eb0-569a-cea14bc9da79, 'name': SearchDatastore_Task, 'duration_secs': 0.014142} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.025193] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcb9497f-6a12-4daf-8abb-130e6843048f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.030177] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 800.030177] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527bad7c-9200-a229-88e5-c27701ff578a" [ 800.030177] env[69982]: _type = "Task" [ 800.030177] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.039966] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527bad7c-9200-a229-88e5-c27701ff578a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.223091] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864499, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.386372] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.388858] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.380s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.389112] env[69982]: DEBUG nova.objects.instance [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lazy-loading 'resources' on Instance uuid bba73604-c54f-4643-9e4c-326b643b3d51 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 800.422268] env[69982]: INFO nova.scheduler.client.report [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted allocations for instance 25957956-0d50-4b4f-8e5c-f55a1e182235 [ 800.519967] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864498, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529217} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.520392] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.522049] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.522049] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-055c5f6a-2384-4e11-9593-54f2f70d802a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.528098] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 800.528098] env[69982]: value = "task-3864500" [ 800.528098] env[69982]: _type = "Task" [ 800.528098] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.544507] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.544849] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527bad7c-9200-a229-88e5-c27701ff578a, 'name': SearchDatastore_Task, 'duration_secs': 0.014328} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.545280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.545370] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 800.545629] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b228810-b055-452d-a0b8-b0be46d83128 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.557023] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 800.557023] env[69982]: value = "task-3864501" [ 800.557023] env[69982]: _type = "Task" [ 800.557023] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.563248] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864501, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.724086] env[69982]: DEBUG oslo_vmware.api [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864499, 'name': PowerOnVM_Task, 'duration_secs': 0.991952} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.724522] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.724774] env[69982]: INFO nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Took 9.15 seconds to spawn the instance on the hypervisor. [ 800.724962] env[69982]: DEBUG nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.725907] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1bdaa4-4f2b-45f3-a0e1-a42f199e7e8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.935622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65b4c931-4421-45a6-8576-c7078c34c090 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "25957956-0d50-4b4f-8e5c-f55a1e182235" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.110s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.044168] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071154} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.048299] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.049739] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fdd948-5ce1-4feb-addb-950c857c1b9b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.078437] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.081996] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af381c81-69cc-40f8-9228-b7037fa9f265 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.102452] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864501, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.109039] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 801.109039] env[69982]: value = "task-3864502" [ 801.109039] env[69982]: _type = "Task" [ 801.109039] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.122893] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864502, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.256186] env[69982]: INFO nova.compute.manager [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Took 39.27 seconds to build instance. 
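
The 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' lines around the devstack-image-cache_base entries, and the 'acquired by "..." :: waited' / '"released" by "..." :: held' lines around "compute_resources", come from the two usual oslo.concurrency locking forms. A minimal sketch of both, with a placeholder lock name and hypothetical guarded functions (not Nova's actual code paths):

    from oslo_concurrency import lockutils


    def populate_cache_entry():
        """Hypothetical placeholder for the work guarded by the cache lock."""


    # Context-manager form: emits the 'Acquiring lock' / 'Acquired lock' /
    # 'Releasing lock' DEBUG lines (lockutils.lock), as seen above for the
    # image-cache entries. '<image-id>' is a placeholder.
    with lockutils.lock('[datastore2] devstack-image-cache_base/<image-id>'):
        populate_cache_entry()


    # Decorator form: emits the 'acquired by "..." :: waited N.NNNs' and
    # '"released" by "..." :: held N.NNNs' lines, as seen for "compute_resources".
    @lockutils.synchronized('compute_resources')
    def update_usage():
        """Hypothetical placeholder for resource-tracker style work."""
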
[ 801.554048] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0117b103-2bdd-4b09-9142-48e8b4d2d4c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.566263] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538798} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.568805] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 801.569187] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 801.569589] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6b0d97f-b490-4313-8d70-9390f8a93e30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.572487] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87de4664-5044-4d6b-8ea9-3e6d78a508d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.606276] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eca5971-c7a3-48d1-a265-55cad6b90646 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.609148] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 801.609148] env[69982]: value = "task-3864503" [ 801.609148] env[69982]: _type = "Task" [ 801.609148] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.622305] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1838ed4b-af5b-41f6-ad70-9affa2a69626 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.632802] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864503, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.633087] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864502, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.647478] env[69982]: DEBUG nova.compute.provider_tree [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.758146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fd2f708-98f9-4fa1-9fc7-ddee99074f1b tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.059s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.124052] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12123} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.127196] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.127555] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864502, 'name': ReconfigVM_Task, 'duration_secs': 0.560548} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.130014] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c52f71-1c59-47fd-864b-dcf79c481adc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.130967] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Reconfigured VM instance instance-0000002c to attach disk [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.131608] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be7ae1dd-372b-46ce-bb6e-4f274b1881f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.158556] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.160916] env[69982]: DEBUG nova.scheduler.client.report [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 802.164548] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6341822a-00f1-4d44-b9fa-6d7de2f45c0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.180975] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 802.180975] env[69982]: value = "task-3864504" [ 802.180975] env[69982]: _type = "Task" [ 802.180975] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.188546] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 802.188546] env[69982]: value = "task-3864505" [ 802.188546] env[69982]: _type = "Task" [ 802.188546] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.195703] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864504, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.201748] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864505, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.261191] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 802.581527] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "48dbc665-8286-4d5d-af4e-1a85d1742952" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.581729] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.684108] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.294s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.686786] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.867s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.688054] env[69982]: DEBUG nova.objects.instance [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lazy-loading 'resources' on Instance uuid 365b8207-f49b-4ee7-af6f-9d271eed2e38 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.706378] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864504, 
'name': Rename_Task, 'duration_secs': 0.149652} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.709779] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.710218] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864505, 'name': ReconfigVM_Task, 'duration_secs': 0.295803} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.710420] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a63bf5e-8db5-4c02-8032-e1831816be07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.715155] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Reconfigured VM instance instance-00000029 to attach disk [datastore2] d5c23433-a0f3-4f0a-9c62-051d07dcd712/d5c23433-a0f3-4f0a-9c62-051d07dcd712.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.715823] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44bbfbb5-0435-47cd-aa2c-fa7f6879bb0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.718598] env[69982]: INFO nova.scheduler.client.report [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted allocations for instance bba73604-c54f-4643-9e4c-326b643b3d51 [ 802.725682] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 802.725682] env[69982]: value = "task-3864506" [ 802.725682] env[69982]: _type = "Task" [ 802.725682] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.731332] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 802.731332] env[69982]: value = "task-3864507" [ 802.731332] env[69982]: _type = "Task" [ 802.731332] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.737586] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864506, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.744053] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864507, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.787405] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.251978] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9ba2306-b74f-4059-b597-17c8a65d67d2 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "bba73604-c54f-4643-9e4c-326b643b3d51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.767s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.260933] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864506, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.272945] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864507, 'name': Rename_Task, 'duration_secs': 0.144704} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.277282] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 803.277851] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5581984-440f-4136-b301-d280d938a356 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.286067] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 803.286067] env[69982]: value = "task-3864508" [ 803.286067] env[69982]: _type = "Task" [ 803.286067] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.303889] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864508, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.741356] env[69982]: DEBUG oslo_vmware.api [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864506, 'name': PowerOnVM_Task, 'duration_secs': 0.532696} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.741517] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.741658] env[69982]: INFO nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Took 6.99 seconds to spawn the instance on the hypervisor. [ 803.741804] env[69982]: DEBUG nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.745480] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d666933-bbfd-4b6a-82ce-5094292533c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.810444] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864508, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.923369] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c941c8-13e7-4b7a-b0c6-87c5bd2a7da0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.932818] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2212f163-c797-4e5d-9c9a-d458354d4a5e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.965090] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f0448-fb40-442d-8d87-817a5b4344cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.988627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587e500c-172f-464f-930b-077f79ceff02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.006234] env[69982]: DEBUG nova.compute.provider_tree [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.120544] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "6de35617-22cf-4a32-8651-28ea67532b8f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.120866] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.121039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.121225] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.121392] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 
tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.123978] env[69982]: INFO nova.compute.manager [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Terminating instance [ 804.268466] env[69982]: INFO nova.compute.manager [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Took 37.54 seconds to build instance. [ 804.312286] env[69982]: DEBUG oslo_vmware.api [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864508, 'name': PowerOnVM_Task, 'duration_secs': 0.578702} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.312286] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 804.312286] env[69982]: DEBUG nova.compute.manager [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 804.312286] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d97198d-12c1-42ed-ab00-0e54fe19b285 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.509588] env[69982]: DEBUG nova.scheduler.client.report [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 804.631804] env[69982]: DEBUG nova.compute.manager [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 804.631907] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 804.633025] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4367d528-3c8d-42bf-ac01-a8538e95f76f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.643010] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 804.643302] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1dfe751e-c0b4-40ac-a749-00116f27ae44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.652060] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 804.652060] env[69982]: value = "task-3864509" [ 804.652060] env[69982]: _type = "Task" [ 804.652060] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.662121] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864509, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.771472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad6839e2-c199-4fac-a9a0-37c456df4414 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.053s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.835968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.018015] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.331s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.020508] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.046s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.022617] env[69982]: INFO nova.compute.claims [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.034985] env[69982]: INFO nova.scheduler.client.report [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Deleted allocations for instance 365b8207-f49b-4ee7-af6f-9d271eed2e38 [ 805.163137] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864509, 'name': PowerOffVM_Task, 'duration_secs': 0.444758} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.163431] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 805.163602] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 805.163862] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8de9c77b-2423-403d-b418-11737c343def {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.239025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 805.239279] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 805.239558] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleting the datastore file [datastore2] 6de35617-22cf-4a32-8651-28ea67532b8f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 805.239852] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-185a3ee0-a1a7-4a5e-884a-e9d07e9391ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.247980] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for the task: (returnval){ [ 805.247980] env[69982]: value = "task-3864511" [ 805.247980] env[69982]: _type = "Task" [ 805.247980] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.258920] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864511, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.278340] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 805.310577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.310577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.547680] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aabe7958-9c0a-4d27-8a36-9fafdc32cbee tempest-ServersAdmin275Test-1093515878 tempest-ServersAdmin275Test-1093515878-project-member] Lock "365b8207-f49b-4ee7-af6f-9d271eed2e38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.585s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.704388] env[69982]: INFO nova.compute.manager [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Rebuilding instance [ 805.759182] env[69982]: DEBUG nova.compute.manager [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.760350] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214418d2-6338-484f-9d72-2caadc7ca9f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.775454] env[69982]: DEBUG oslo_vmware.api [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Task: {'id': task-3864511, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.288634} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.779190] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 805.779190] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 805.779190] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.779190] env[69982]: INFO nova.compute.manager [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 805.779190] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 805.779367] env[69982]: DEBUG nova.compute.manager [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 805.779367] env[69982]: DEBUG nova.network.neutron [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 805.804071] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.060897] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.061298] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.061494] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.061703] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.061881] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.068561] env[69982]: INFO nova.compute.manager [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] 
Terminating instance [ 806.271753] env[69982]: DEBUG nova.compute.manager [req-1c68483a-61b2-46c7-bb24-5898eb68265f req-13764a91-88ce-4dfa-9256-d30c5acb91f1 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Received event network-vif-deleted-2819d991-2637-403b-b564-70e27a846f61 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.271753] env[69982]: INFO nova.compute.manager [req-1c68483a-61b2-46c7-bb24-5898eb68265f req-13764a91-88ce-4dfa-9256-d30c5acb91f1 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Neutron deleted interface 2819d991-2637-403b-b564-70e27a846f61; detaching it from the instance and deleting it from the info cache [ 806.271753] env[69982]: DEBUG nova.network.neutron [req-1c68483a-61b2-46c7-bb24-5898eb68265f req-13764a91-88ce-4dfa-9256-d30c5acb91f1 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.577803] env[69982]: DEBUG nova.compute.manager [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 806.578355] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.579570] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ccecad-40f1-4777-a06f-e7dbe0734696 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.588276] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 806.589036] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34d64cef-311e-4495-afa2-b4b6e56651bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.599490] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 806.599490] env[69982]: value = "task-3864512" [ 806.599490] env[69982]: _type = "Task" [ 806.599490] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.612109] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864512, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.650361] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc3c49a-5993-4a8d-8257-c4c2529ace32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.660444] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e587f2f0-410c-4dcf-9d9c-67fd49d5651e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.697195] env[69982]: DEBUG nova.network.neutron [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.700582] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c297413b-bc1d-4125-91cb-d0c86928a382 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.708094] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.708345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.716882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6b5017-d4ad-4562-a575-b3583536bf3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.733187] env[69982]: DEBUG nova.compute.provider_tree [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.777020] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d47f1b49-7b32-44d2-8afe-0a44c19fcd58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.780436] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 806.780436] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b679bc4-70dc-4f2b-817e-e07054e65264 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.788285] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2869f58a-fe6b-4fee-860e-d370ab877c03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.800631] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 806.800631] env[69982]: value = "task-3864513" [ 806.800631] env[69982]: _type = "Task" [ 806.800631] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.811274] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864513, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.826620] env[69982]: DEBUG nova.compute.manager [req-1c68483a-61b2-46c7-bb24-5898eb68265f req-13764a91-88ce-4dfa-9256-d30c5acb91f1 service nova] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Detach interface failed, port_id=2819d991-2637-403b-b564-70e27a846f61, reason: Instance 6de35617-22cf-4a32-8651-28ea67532b8f could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 807.113453] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864512, 'name': PowerOffVM_Task, 'duration_secs': 0.260485} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.113868] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.114540] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.114540] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96315ad6-24a9-4065-96d7-75fb082f5814 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.182428] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.182714] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.182880] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore2] d5c23433-a0f3-4f0a-9c62-051d07dcd712 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.183199] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3cff693-bedd-446c-b263-06335198d91b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.191285] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 807.191285] env[69982]: value = "task-3864515" [ 807.191285] env[69982]: _type = "Task" [ 807.191285] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.200712] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864515, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.208509] env[69982]: INFO nova.compute.manager [-] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Took 1.43 seconds to deallocate network for instance. 
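The entries from roughly 806.588 through 807.208 record the teardown order for instance d5c23433: power off, unregister, delete the datastore directory, then deallocate networking. The outline below is a hedged paraphrase of that order; every helper name is a stand-in for the vmops/ds_util/neutron calls named in the log, not Nova's real API.

def destroy_on_hypervisor(session, vm_ref, ds_path, network_api, context, instance):
    # Order taken from the log above; all methods are illustrative stand-ins.
    session.power_off(vm_ref)                 # VirtualMachine.PowerOffVM_Task
    session.unregister(vm_ref)                # VirtualMachine.UnregisterVM
    session.delete_datastore_file(ds_path)    # FileManager.DeleteDatastoreFile_Task
    network_api.deallocate_for_instance(context, instance)  # neutron port cleanup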
[ 807.240237] env[69982]: DEBUG nova.scheduler.client.report [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 807.312677] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864513, 'name': PowerOffVM_Task, 'duration_secs': 0.285536} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.312968] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.313917] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.314853] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e478598-f55e-4b73-b5b8-fe58eb899f41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.324880] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.325390] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e3f9245-fc2c-4cf4-ac86-95f28ff37b22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.356630] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 807.356880] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 807.357075] env[69982]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Deleting the datastore file [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 807.357361] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8f51b59c-c0a7-4c57-9759-cd9bfa163ace {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.366283] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 807.366283] env[69982]: value = "task-3864517" [ 807.366283] env[69982]: _type = "Task" [ 807.366283] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.375980] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864517, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.702016] env[69982]: DEBUG oslo_vmware.api [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864515, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165753} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.702308] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.702540] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.702729] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.702901] env[69982]: INFO nova.compute.manager [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Took 1.12 seconds to destroy the instance on the hypervisor. 
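Both tempest instances in this stretch (d5c23433-a0f3-4f0a-9c62-051d07dcd712 and fae97132-44b4-4df1-bd34-ba694ea7016a) go through the same _destroy_instance ordering: power off, UnregisterVM, then delete the instance directory from datastore2 and wait on the delete task. A condensed, hypothetical sketch of that ordering follows; the three callables stand in for the vCenter operations (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) and are not the real nova.virt.vmwareapi.vmops code.

    import time

    def destroy_instance(power_off, unregister, delete_datastore_dir,
                         instance_uuid, datastore_path):
        """Condensed destroy ordering seen in the log.

        Each callable is assumed to block until its vCenter task finishes,
        mirroring the wait_for_task() calls around task-3864515 / task-3864517.
        """
        start = time.monotonic()
        power_off(instance_uuid)              # "Powered off the VM"
        unregister(instance_uuid)             # "Unregistered the VM"
        delete_datastore_dir(datastore_path)  # "Deleted contents of the VM from datastore datastore2"
        # "Took 1.12 seconds to destroy the instance on the hypervisor."
        return time.monotonic() - start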
[ 807.703159] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.703427] env[69982]: DEBUG nova.compute.manager [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 807.703500] env[69982]: DEBUG nova.network.neutron [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 807.716542] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.746780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.726s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.747316] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 807.750040] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.791s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.754021] env[69982]: DEBUG nova.objects.instance [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lazy-loading 'resources' on Instance uuid 24641406-5292-4497-b34f-9af0dcdc58d7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.876891] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864517, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115846} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.877186] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.877383] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.877566] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.254019] env[69982]: DEBUG nova.compute.utils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 808.259248] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 808.259248] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 808.324987] env[69982]: DEBUG nova.compute.manager [req-4b732736-ea90-4a0e-ae81-457b26fef5a9 req-110d004b-9873-47dc-9f97-3cdf5be86bed service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Received event network-vif-deleted-43f402d8-57b0-4bf5-a153-b4dc7f97d98d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 808.325249] env[69982]: INFO nova.compute.manager [req-4b732736-ea90-4a0e-ae81-457b26fef5a9 req-110d004b-9873-47dc-9f97-3cdf5be86bed service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Neutron deleted interface 43f402d8-57b0-4bf5-a153-b4dc7f97d98d; detaching it from the instance and deleting it from the info cache [ 808.325334] env[69982]: DEBUG nova.network.neutron [req-4b732736-ea90-4a0e-ae81-457b26fef5a9 req-110d004b-9873-47dc-9f97-3cdf5be86bed service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.336283] env[69982]: DEBUG nova.policy [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e8a458282a94c7493628f2307755af3', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': '6642ce24dd0040fea9a3f89b22343330', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.622185] env[69982]: DEBUG nova.network.neutron [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.760276] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 808.835600] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a888134d-1175-4fad-90bb-a977ef64a040 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.855108] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b0abeb-2461-45c5-8a13-d15d39a96474 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.896278] env[69982]: DEBUG nova.compute.manager [req-4b732736-ea90-4a0e-ae81-457b26fef5a9 req-110d004b-9873-47dc-9f97-3cdf5be86bed service nova] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Detach interface failed, port_id=43f402d8-57b0-4bf5-a153-b4dc7f97d98d, reason: Instance d5c23433-a0f3-4f0a-9c62-051d07dcd712 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 808.898072] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Successfully created port: e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.937882] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.938179] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.938506] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.938566] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.938718] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.938862] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.939128] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 808.939249] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.939417] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.939609] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.939797] env[69982]: DEBUG nova.virt.hardware [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.940877] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0e1df1-50a1-49c7-8c64-a0096ac5b972 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.954180] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75084400-610b-4fbe-a800-13a2c327e9b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.964310] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80860327-5d7e-4f06-9827-edad80d8ee4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.983315] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 808.989280] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.991637] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 808.991933] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-407673a8-c1be-423c-8740-4047c3f60d0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.004597] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-660c97f9-6b07-40e1-9d9b-3c77f27bb449 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.015218] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 809.015218] env[69982]: value = "task-3864518" [ 809.015218] env[69982]: _type = "Task" [ 809.015218] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.044080] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673d7e18-88da-43fb-9e87-284d1b987cd4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.052773] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864518, 'name': CreateVM_Task} progress is 15%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.056130] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc59f1ec-9bbf-42d2-9059-595b436c0452 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.073573] env[69982]: DEBUG nova.compute.provider_tree [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.124518] env[69982]: INFO nova.compute.manager [-] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Took 1.42 seconds to deallocate network for instance. [ 809.559148] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864518, 'name': CreateVM_Task, 'duration_secs': 0.312636} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.559148] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 809.559148] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.559148] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.559148] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 809.559148] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91ab4616-609b-4c17-b203-99356025d5d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.567040] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 809.567040] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c2503-a39a-f1d1-5d0a-a1ecec375da9" [ 809.567040] env[69982]: _type = "Task" [ 809.567040] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.575660] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c2503-a39a-f1d1-5d0a-a1ecec375da9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.579204] env[69982]: DEBUG nova.scheduler.client.report [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.631604] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.770888] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 809.818527] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 809.818785] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.818939] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 809.819134] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Flavor pref 0:0:0 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.819281] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 809.819431] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 809.819634] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 809.819792] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 809.819959] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 809.820292] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 809.820500] env[69982]: DEBUG nova.virt.hardware [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 809.821851] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880a53a5-8440-4657-a937-b9afdfa87eca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.831241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff3d3c8-594e-4e94-a770-dce841bf99e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.076061] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c2503-a39a-f1d1-5d0a-a1ecec375da9, 'name': SearchDatastore_Task, 'duration_secs': 0.01249} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.076394] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.076633] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 810.076870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.077020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.077205] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 810.077479] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a2e26de-d0db-4da9-a6e8-305b5b951191 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.084769] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.334s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.088289] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.051s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.089430] env[69982]: INFO nova.compute.claims [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Claim successful on 
node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.092670] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 810.092670] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 810.093237] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdcd865f-5655-4833-a873-c3aa574d3b9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.101467] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 810.101467] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524f8ee8-39a8-fd53-0775-3603335e7834" [ 810.101467] env[69982]: _type = "Task" [ 810.101467] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.109664] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524f8ee8-39a8-fd53-0775-3603335e7834, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.122408] env[69982]: INFO nova.scheduler.client.report [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleted allocations for instance 24641406-5292-4497-b34f-9af0dcdc58d7 [ 810.612379] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524f8ee8-39a8-fd53-0775-3603335e7834, 'name': SearchDatastore_Task, 'duration_secs': 0.011695} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.617056] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02460367-c9f4-4159-8195-1f49419d61dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.622069] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 810.622069] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5235cf19-0215-d131-b3ed-409d71334c11" [ 810.622069] env[69982]: _type = "Task" [ 810.622069] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.633143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d3b26902-7c7a-40b1-bdb9-79602e0ef4e3 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.919s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.634304] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 31.879s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.634569] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.634776] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.635126] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.647907] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5235cf19-0215-d131-b3ed-409d71334c11, 'name': SearchDatastore_Task, 'duration_secs': 0.011036} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.650258] env[69982]: INFO nova.compute.manager [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Terminating instance [ 810.653292] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.653610] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 810.655162] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55c8d0a1-ac9a-453f-9ef1-f8bd0a6700a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.663437] env[69982]: DEBUG nova.compute.manager [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Received event network-vif-plugged-e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 810.663437] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] Acquiring lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.664661] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.664661] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.664661] env[69982]: DEBUG nova.compute.manager [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] No waiting events found dispatching network-vif-plugged-e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 810.664661] env[69982]: WARNING nova.compute.manager [req-9a26c909-deb9-4355-90a2-b3135394f4a4 req-7246f0f9-f9f4-4896-bf12-22dc9da428eb service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Received unexpected event network-vif-plugged-e56d9c29-71a4-4d37-b444-7c544cd50695 for instance with vm_state building and task_state spawning. [ 810.671312] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 810.671312] env[69982]: value = "task-3864519" [ 810.671312] env[69982]: _type = "Task" [ 810.671312] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.683744] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864519, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.833577] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Successfully updated port: e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.158220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.158487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquired lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.158676] env[69982]: DEBUG nova.network.neutron [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 811.184579] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864519, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.333174] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.333362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.333505] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 811.664619] env[69982]: DEBUG nova.compute.utils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Can not refresh info_cache because instance was not found {{(pid=69982) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 811.683167] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864519, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52857} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.689019] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 811.689019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 811.689019] env[69982]: DEBUG nova.network.neutron [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 811.689839] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3497eb4-8285-464a-83eb-98a4240094e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.698529] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 811.698529] env[69982]: value = "task-3864520" [ 811.698529] env[69982]: _type = "Task" [ 811.698529] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.704690] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2edd80-b4f2-48d3-a928-61a74292ecd9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.710702] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.715975] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fbb2f2-ed1e-467d-80e0-9102af7c64e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.755153] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bcece7-2356-4776-a318-ce5eae2f36bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.763917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493f5879-f91b-4350-bb61-b226eccb5058 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.779827] env[69982]: DEBUG nova.compute.provider_tree [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.806508] env[69982]: DEBUG nova.network.neutron [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.889039] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 812.046334] env[69982]: DEBUG nova.network.neutron [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updating instance_info_cache with network_info: [{"id": "e56d9c29-71a4-4d37-b444-7c544cd50695", "address": "fa:16:3e:c7:5f:e0", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56d9c29-71", "ovs_interfaceid": "e56d9c29-71a4-4d37-b444-7c544cd50695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.101958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "589419ea-c609-45bb-bde5-3b22d9ff111e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.102465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.102890] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.104022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.104022] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.106412] env[69982]: INFO nova.compute.manager [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Terminating instance [ 812.211763] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07588} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.214019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 812.214019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770be3c2-443c-47b4-87d9-77b7a474296c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.238981] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.239523] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66fc580e-3449-4534-aa63-47494cf87921 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.266373] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 812.266373] env[69982]: value = "task-3864521" [ 812.266373] env[69982]: _type = "Task" [ 812.266373] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.275704] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864521, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.284914] env[69982]: DEBUG nova.scheduler.client.report [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 812.309513] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Releasing lock "refresh_cache-24641406-5292-4497-b34f-9af0dcdc58d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.309798] env[69982]: DEBUG nova.compute.manager [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.309991] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.310396] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-868167c1-ed7b-4d8a-94fb-1bd7273fb3c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.321249] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d9c12d-9279-4f6e-941b-ab213889d81e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.356554] env[69982]: WARNING nova.virt.vmwareapi.vmops [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 24641406-5292-4497-b34f-9af0dcdc58d7 could not be found. 
[ 812.356873] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 812.357124] env[69982]: INFO nova.compute.manager [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 0.05 seconds to destroy the instance on the hypervisor. [ 812.357406] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.357662] env[69982]: DEBUG nova.compute.manager [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 812.357755] env[69982]: DEBUG nova.network.neutron [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 812.376000] env[69982]: DEBUG nova.network.neutron [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 812.547810] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.548184] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Instance network_info: |[{"id": "e56d9c29-71a4-4d37-b444-7c544cd50695", "address": "fa:16:3e:c7:5f:e0", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56d9c29-71", "ovs_interfaceid": "e56d9c29-71a4-4d37-b444-7c544cd50695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 812.548649] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:5f:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e56d9c29-71a4-4d37-b444-7c544cd50695', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.556975] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating folder: Project (6642ce24dd0040fea9a3f89b22343330). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 812.557342] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-16c856cd-14f9-4d55-937c-50662e2333ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.569435] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created folder: Project (6642ce24dd0040fea9a3f89b22343330) in parent group-v767796. [ 812.569627] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating folder: Instances. Parent ref: group-v767928. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 812.569878] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9eca6fc-9a99-47ae-9d92-e3c701eb8264 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.579146] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created folder: Instances in parent group-v767928. [ 812.579402] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.579595] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 812.579807] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb20a1ac-8fb4-401c-8cf2-005650a1b7d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.599506] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.599506] env[69982]: value = "task-3864524" [ 812.599506] env[69982]: _type = "Task" [ 812.599506] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.607686] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864524, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.611559] env[69982]: DEBUG nova.compute.manager [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 812.611783] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 812.612639] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dafeda9-7625-4774-9b01-eecb2ed684e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.621923] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 812.622238] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddb1dce1-8c66-4bc7-a7f2-2ccc4fa0d6e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.630309] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 812.630309] env[69982]: value = "task-3864525" [ 812.630309] env[69982]: _type = "Task" [ 812.630309] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.641950] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864525, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.693664] env[69982]: DEBUG nova.compute.manager [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Received event network-changed-e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.693912] env[69982]: DEBUG nova.compute.manager [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Refreshing instance network info cache due to event network-changed-e56d9c29-71a4-4d37-b444-7c544cd50695. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 812.694170] env[69982]: DEBUG oslo_concurrency.lockutils [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] Acquiring lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.694285] env[69982]: DEBUG oslo_concurrency.lockutils [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] Acquired lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.694472] env[69982]: DEBUG nova.network.neutron [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Refreshing network info cache for port e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 812.779336] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864521, 'name': ReconfigVM_Task, 'duration_secs': 0.300429} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.779688] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Reconfigured VM instance instance-0000002c to attach disk [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a/fae97132-44b4-4df1-bd34-ba694ea7016a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 812.780488] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c5ded2b-043f-4f19-a7cc-7434230c8190 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.788455] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 812.788455] env[69982]: value = "task-3864526" [ 812.788455] env[69982]: _type = "Task" [ 812.788455] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.792740] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.793320] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 812.796018] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.367s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.797854] env[69982]: INFO nova.compute.claims [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 812.808698] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864526, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.878331] env[69982]: DEBUG nova.network.neutron [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.111153] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864524, 'name': CreateVM_Task, 'duration_secs': 0.481314} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.111361] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.112130] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.112251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.112579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 813.112834] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aeedc07-9083-4ffb-8ec6-fde17ec835f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.118054] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 813.118054] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f23061-e2d4-44ca-5438-b72cf98c073e" [ 813.118054] env[69982]: _type = "Task" [ 813.118054] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.129483] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f23061-e2d4-44ca-5438-b72cf98c073e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.145034] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864525, 'name': PowerOffVM_Task, 'duration_secs': 0.214941} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.145034] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 813.145034] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 813.145034] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e313db-dfff-4924-8592-cec9bd3d850f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.220021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 813.220021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 813.220021] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleting the datastore file [datastore1] 589419ea-c609-45bb-bde5-3b22d9ff111e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 813.220021] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12b18f96-da3b-4c23-9f79-4de4615370e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.231095] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for the task: (returnval){ [ 813.231095] env[69982]: value = "task-3864528" [ 813.231095] env[69982]: _type = "Task" [ 813.231095] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.245497] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864528, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.316301] env[69982]: DEBUG nova.compute.utils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 813.321722] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864526, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.322846] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 813.322846] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 813.380795] env[69982]: INFO nova.compute.manager [-] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Took 1.02 seconds to deallocate network for instance. [ 813.392039] env[69982]: DEBUG nova.policy [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c84472005ef43d99658fa6f5cf59bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07f7b975ecb449a290e2ae6582e07016', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 813.633051] env[69982]: DEBUG nova.network.neutron [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updated VIF entry in instance network info cache for port e56d9c29-71a4-4d37-b444-7c544cd50695. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 813.633051] env[69982]: DEBUG nova.network.neutron [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updating instance_info_cache with network_info: [{"id": "e56d9c29-71a4-4d37-b444-7c544cd50695", "address": "fa:16:3e:c7:5f:e0", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56d9c29-71", "ovs_interfaceid": "e56d9c29-71a4-4d37-b444-7c544cd50695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.643925] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f23061-e2d4-44ca-5438-b72cf98c073e, 'name': SearchDatastore_Task, 'duration_secs': 0.010474} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.644850] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.645116] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.645359] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.645505] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.645877] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.646336] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-76b0a278-c233-4e6a-8191-f1259d73b033 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.434031] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Successfully created port: 94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.440511] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 814.441666] env[69982]: INFO nova.compute.manager [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance disappeared during terminate [ 814.445029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8775716d-d2d3-48dd-adcb-58afbd53e559 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "24641406-5292-4497-b34f-9af0dcdc58d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.808s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.445029] env[69982]: DEBUG oslo_concurrency.lockutils [req-3c42239e-ac38-4f6d-a8c7-b42a66dd9a8e req-1b4b4fe4-c310-40b8-90f0-4c28daf0df2b service nova] Releasing lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.460021] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864526, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.460728] env[69982]: DEBUG oslo_vmware.api [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Task: {'id': task-3864528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17117} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.462094] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 814.462320] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 814.462513] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 814.462687] env[69982]: INFO nova.compute.manager [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Took 1.85 seconds to destroy the instance on the hypervisor. 
[ 814.464306] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.464306] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.464306] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.464306] env[69982]: DEBUG nova.compute.manager [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 814.464306] env[69982]: DEBUG nova.network.neutron [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 814.467369] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b47e754a-821c-404e-9b69-4f0c7a5eceb7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.473646] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 814.473646] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9e1ee-ec57-b19c-8160-979000f216a8" [ 814.473646] env[69982]: _type = "Task" [ 814.473646] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.482585] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9e1ee-ec57-b19c-8160-979000f216a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.958048] env[69982]: DEBUG nova.compute.manager [req-235aafec-8e33-4f2f-8df6-075a32072aa1 req-c460ace2-cfd5-430b-89a6-f1113aad135d service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Received event network-vif-deleted-44b0fa67-6d20-4612-a177-c8b0ed39faaf {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.958295] env[69982]: INFO nova.compute.manager [req-235aafec-8e33-4f2f-8df6-075a32072aa1 req-c460ace2-cfd5-430b-89a6-f1113aad135d service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Neutron deleted interface 44b0fa67-6d20-4612-a177-c8b0ed39faaf; detaching it from the instance and deleting it from the info cache [ 814.958583] env[69982]: DEBUG nova.network.neutron [req-235aafec-8e33-4f2f-8df6-075a32072aa1 req-c460ace2-cfd5-430b-89a6-f1113aad135d service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.960208] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864526, 'name': Rename_Task, 'duration_secs': 1.869504} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.961240] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 814.961240] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8220b6f4-f8d8-4352-8492-0ab7be708d97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.972477] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 814.972477] env[69982]: value = "task-3864529" [ 814.972477] env[69982]: _type = "Task" [ 814.972477] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.988686] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864529, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.992430] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9e1ee-ec57-b19c-8160-979000f216a8, 'name': SearchDatastore_Task, 'duration_secs': 0.013329} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.993542] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ccdc5fd-13e0-4bb3-b1d3-d5d853c9af4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.002581] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 815.002581] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274aca6-26cd-7f3c-5358-dee33e4a985f" [ 815.002581] env[69982]: _type = "Task" [ 815.002581] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.013715] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274aca6-26cd-7f3c-5358-dee33e4a985f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.073912] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94c9ecd-feb9-4d53-805e-5abb5dd05c69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.082179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e36851-6cd0-4f99-b54e-5b423be1b09d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.117031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77939b4e-a1be-49e7-aefb-017e3eea6d1c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.123632] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2b2614-bd7e-4334-88d9-8066fa3c92a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.138356] env[69982]: DEBUG nova.compute.provider_tree [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.430407] env[69982]: DEBUG nova.network.neutron [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.461879] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 815.464250] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4da4b8e0-318b-48ac-9938-6b3fc6acbcad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.476363] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04d656c-476a-4f87-a708-be69b322ee85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.504638] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864529, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.507250] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 815.511461] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.511628] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 815.511817] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.511958] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 815.512115] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 815.512333] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 815.512491] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 815.512654] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 815.512811] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 815.512978] env[69982]: DEBUG nova.virt.hardware [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 815.516934] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7116cba2-a2cb-4693-995d-9c6e6399fdcd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.538779] env[69982]: DEBUG nova.compute.manager [req-235aafec-8e33-4f2f-8df6-075a32072aa1 req-c460ace2-cfd5-430b-89a6-f1113aad135d service nova] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Detach interface failed, port_id=44b0fa67-6d20-4612-a177-c8b0ed39faaf, reason: Instance 589419ea-c609-45bb-bde5-3b22d9ff111e could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 815.554400] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f85bff-a061-42c8-8893-58f1956ba6b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.561372] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274aca6-26cd-7f3c-5358-dee33e4a985f, 'name': SearchDatastore_Task, 'duration_secs': 0.010444} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.561651] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.562101] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4064177-051b-4ec8-a1fc-fa5d299add8b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.563260] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1764042-bb56-4aa2-b9c9-0fc2d1aa2d9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.586077] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 815.586077] env[69982]: value = "task-3864530" [ 815.586077] env[69982]: _type = "Task" [ 815.586077] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.595360] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864530, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.642294] env[69982]: DEBUG nova.scheduler.client.report [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 815.936986] env[69982]: INFO nova.compute.manager [-] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Took 1.47 seconds to deallocate network for instance. [ 815.986787] env[69982]: DEBUG oslo_vmware.api [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864529, 'name': PowerOnVM_Task, 'duration_secs': 0.61665} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.987138] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.987893] env[69982]: DEBUG nova.compute.manager [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.988252] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a74682-0c5e-417c-a7db-a207e80b30f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.102417] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864530, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.105366] env[69982]: DEBUG nova.compute.manager [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Received event network-vif-plugged-94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.105635] env[69982]: DEBUG oslo_concurrency.lockutils [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.106313] env[69982]: DEBUG oslo_concurrency.lockutils [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.106313] env[69982]: DEBUG oslo_concurrency.lockutils [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.106685] env[69982]: DEBUG nova.compute.manager [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] No waiting events found dispatching network-vif-plugged-94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.106928] env[69982]: WARNING nova.compute.manager [req-200350ac-64e8-4e8d-9789-11b97f6a8e05 req-82d775c1-6854-43f7-8788-1d835b191c0b service nova] [instance: 
a70fa652-4726-4bc2-966f-530aaa79ba86] Received unexpected event network-vif-plugged-94487a32-cb22-46ea-afe4-0dab4d8d18be for instance with vm_state building and task_state spawning. [ 816.149796] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.354s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.150405] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.155354] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.609s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.157293] env[69982]: INFO nova.compute.claims [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.193652] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Successfully updated port: 94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.445473] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.517486] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.599196] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563731} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.599491] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4064177-051b-4ec8-a1fc-fa5d299add8b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.599896] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.600348] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2cefe0e-7947-4891-a5ff-0796f8efceb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.608634] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 816.608634] env[69982]: value = "task-3864531" [ 816.608634] env[69982]: _type = "Task" [ 816.608634] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.617245] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864531, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.660039] env[69982]: DEBUG nova.compute.utils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 816.660777] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 816.661165] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 816.695782] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.697063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.697281] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 816.740188] env[69982]: DEBUG nova.policy [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847765544bc249f7b2f5a61020cddd46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38945e679e1c4550b82ada82b9b1b7ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.122450] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864531, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080456} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.122817] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.125252] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9319898d-f5a8-4c86-bf00-8c2d1785b6e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.149555] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4064177-051b-4ec8-a1fc-fa5d299add8b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.149885] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9af9b503-a84f-4b41-bb30-29e537e0d081 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.168162] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Successfully created port: 9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 817.170764] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.182997] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 817.182997] env[69982]: value = "task-3864532" [ 817.182997] env[69982]: _type = "Task" [ 817.182997] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.193086] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864532, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.252184] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 817.381910] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "fae97132-44b4-4df1-bd34-ba694ea7016a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.382196] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.382454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "fae97132-44b4-4df1-bd34-ba694ea7016a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.382596] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.382761] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.385433] env[69982]: INFO nova.compute.manager [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Terminating instance [ 817.453284] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.456622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.699583] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864532, 'name': ReconfigVM_Task, 'duration_secs': 0.489053} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.702706] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Reconfigured VM instance instance-0000002d to attach disk [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4064177-051b-4ec8-a1fc-fa5d299add8b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.703867] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a96f263-08d8-4eae-970d-8a2786b5566c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.712347] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 817.712347] env[69982]: value = "task-3864533" [ 817.712347] env[69982]: _type = "Task" [ 817.712347] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.725162] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864533, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.729297] env[69982]: DEBUG nova.network.neutron [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Updating instance_info_cache with network_info: [{"id": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "address": "fa:16:3e:cc:ec:de", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94487a32-cb", "ovs_interfaceid": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.816328] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef5fe6e-09cd-46f6-9bf2-6ea801a95aa8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.824592] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd06aa2-7145-41ad-9db4-1648c8ad0b53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.856772] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02742b6-c8be-4fbd-bb33-e8e43f389b35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.865444] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be48fcf-e12c-40bc-8a4c-f35ff592e2ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.880113] env[69982]: DEBUG nova.compute.provider_tree [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.893909] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "refresh_cache-fae97132-44b4-4df1-bd34-ba694ea7016a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.894168] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquired lock "refresh_cache-fae97132-44b4-4df1-bd34-ba694ea7016a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.894352] env[69982]: DEBUG nova.network.neutron [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 818.159333] env[69982]: DEBUG nova.compute.manager [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Received event network-changed-94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.159538] env[69982]: DEBUG nova.compute.manager [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Refreshing instance network info cache due to event network-changed-94487a32-cb22-46ea-afe4-0dab4d8d18be. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 818.159655] env[69982]: DEBUG oslo_concurrency.lockutils [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] Acquiring lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.187769] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.218890] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.219197] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.219430] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.219601] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.219781] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.219959] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.220385] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.220655] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.220981] 
env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.221254] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.221583] env[69982]: DEBUG nova.virt.hardware [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.223863] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ff4afb-37f4-440c-b96f-79dc0ade5362 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.233480] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864533, 'name': Rename_Task, 'duration_secs': 0.25607} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.233968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.234324] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance network_info: |[{"id": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "address": "fa:16:3e:cc:ec:de", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94487a32-cb", "ovs_interfaceid": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.234642] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 818.234964] env[69982]: DEBUG oslo_concurrency.lockutils [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] Acquired lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.235180] env[69982]: DEBUG nova.network.neutron [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Refreshing network info cache for port 94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 818.236460] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:ec:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94487a32-cb22-46ea-afe4-0dab4d8d18be', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.245459] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.245692] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfe76335-f0a9-4d93-bf86-0a1d526831c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.250879] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.251279] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6c1f791-fa2a-44e8-82c1-7a4e007f9ffc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.268073] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5f4f4d-28b3-4444-a28b-73a12090f56e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.277030] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 818.277030] env[69982]: value = "task-3864534" [ 818.277030] env[69982]: _type = "Task" [ 818.277030] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.288180] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.288180] env[69982]: value = "task-3864535" [ 818.288180] env[69982]: _type = "Task" [ 818.288180] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.298679] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864534, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.302936] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864535, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.384593] env[69982]: DEBUG nova.scheduler.client.report [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.414524] env[69982]: DEBUG nova.network.neutron [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 818.480126] env[69982]: DEBUG nova.network.neutron [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.788568] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864534, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.803025] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864535, 'name': CreateVM_Task, 'duration_secs': 0.386163} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.803025] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.803025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.803025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.803025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 818.803025] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1ea2444-cce1-459d-b9f3-a7b3f738caff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.809217] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 818.809217] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5215eb6b-2389-a7dc-3b07-d66a95ca2325" [ 818.809217] env[69982]: _type = "Task" [ 818.809217] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.820801] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5215eb6b-2389-a7dc-3b07-d66a95ca2325, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.890109] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.735s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.893022] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 818.895035] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.737s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.900031] env[69982]: DEBUG nova.objects.instance [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lazy-loading 'resources' on Instance uuid 48162423-a117-437e-b171-9a40c7c6f49b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 818.931372] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Successfully updated port: 9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.982549] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Releasing lock "refresh_cache-fae97132-44b4-4df1-bd34-ba694ea7016a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.984092] env[69982]: DEBUG nova.compute.manager [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 818.984092] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 818.984255] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef2525b-e805-4f2a-ba31-fa09167a651c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.994802] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 818.995365] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61b79435-dfda-461a-8090-5e73fc20a6df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.003438] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 819.003438] env[69982]: value = "task-3864536" [ 819.003438] env[69982]: _type = "Task" [ 819.003438] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.012747] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.104321] env[69982]: DEBUG nova.network.neutron [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Updated VIF entry in instance network info cache for port 94487a32-cb22-46ea-afe4-0dab4d8d18be. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 819.104761] env[69982]: DEBUG nova.network.neutron [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Updating instance_info_cache with network_info: [{"id": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "address": "fa:16:3e:cc:ec:de", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94487a32-cb", "ovs_interfaceid": "94487a32-cb22-46ea-afe4-0dab4d8d18be", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.285728] env[69982]: DEBUG oslo_vmware.api [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864534, 'name': PowerOnVM_Task, 'duration_secs': 0.571772} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.286009] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.286228] env[69982]: INFO nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Took 9.52 seconds to spawn the instance on the hypervisor. 
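The spawn that just completed above follows the driver's invoke-and-poll pattern visible throughout this trace: a vSphere *_Task method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateVM_Task) is invoked through oslo.vmware, and the returned task object is then polled, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, not Nova's actual helper code; it assumes `session` is an already-authenticated oslo_vmware.api.VMwareAPISession and `vm_ref` a VM managed-object reference (both names are placeholders):

def power_on_and_wait(session, vm_ref):
    # *_Task methods return a Task managed object instead of blocking,
    # which is why the log shows a separate "Waiting for the task" step.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task periodically (the "progress is N%"
    # lines) until it reaches the success state, raising if the task
    # ends in error.
    return session.wait_for_task(task)
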
[ 819.286415] env[69982]: DEBUG nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.287198] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824c6650-ed00-40ea-bce5-06bc1c26b254 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.320324] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5215eb6b-2389-a7dc-3b07-d66a95ca2325, 'name': SearchDatastore_Task, 'duration_secs': 0.023913} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.320650] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.320887] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.321147] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.321299] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.321513] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.321912] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a981d6a-ee88-416a-b036-0c035c77d069 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.331628] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.331824] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.332824] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2af3d3-7839-43e3-976d-22edc5c59741 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.339018] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 819.339018] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d307c-be7e-6d6a-ee32-d8781cfc2524" [ 819.339018] env[69982]: _type = "Task" [ 819.339018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.347431] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d307c-be7e-6d6a-ee32-d8781cfc2524, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.399047] env[69982]: DEBUG nova.compute.utils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.399999] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.400242] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.437683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.438179] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.438179] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 819.448359] env[69982]: DEBUG nova.policy [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493da3ee04094ba4ac17893d999ac99e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc588ded27b49d4826535649105aa88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.514295] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864536, 'name': PowerOffVM_Task, 'duration_secs': 0.253406} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.517280] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 819.517475] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 819.517991] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-851bc2c6-fe1d-4074-a956-b5650d3e3ae8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.545858] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.545858] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.545858] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Deleting the datastore file [datastore2] fae97132-44b4-4df1-bd34-ba694ea7016a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.546063] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39756536-7989-462e-ab9e-7cb1f936c858 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.552385] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for the task: (returnval){ [ 819.552385] env[69982]: value = "task-3864538" [ 819.552385] env[69982]: _type = "Task" [ 819.552385] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.563863] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864538, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.608562] env[69982]: DEBUG oslo_concurrency.lockutils [req-6592d2b3-b055-4347-bc8f-b021cabe2ede req-d1592484-672c-4058-baff-8510351313da service nova] Releasing lock "refresh_cache-a70fa652-4726-4bc2-966f-530aaa79ba86" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.808716] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Successfully created port: 82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.810751] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.811284] env[69982]: INFO nova.compute.manager [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Took 44.87 seconds to build instance. [ 819.812164] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 819.852250] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d307c-be7e-6d6a-ee32-d8781cfc2524, 'name': SearchDatastore_Task, 'duration_secs': 0.009545} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.853321] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de08716d-f6c8-4143-a4e4-308dea04103a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.864509] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 819.864509] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52791542-6259-478f-4f76-adc616509544" [ 819.864509] env[69982]: _type = "Task" [ 819.864509] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.877183] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52791542-6259-478f-4f76-adc616509544, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.880046] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.880363] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 819.880837] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2bc8c29-9fe1-4208-ae04-1a317d2deaf2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.888682] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 819.888682] env[69982]: value = "task-3864539" [ 819.888682] env[69982]: _type = "Task" [ 819.888682] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.900959] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864539, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.903566] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 819.979868] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 819.985294] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27553d58-7ade-45fe-8180-fe7bcdeaa32d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.994349] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd18c7a-0b36-493a-82d2-c797d9a9a9fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.029572] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b104b0-44fa-4ac7-be1e-0cc446182685 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.038817] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d56a12c-9ad9-41fa-acd9-ec8eedaf4459 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.054475] env[69982]: DEBUG nova.compute.provider_tree [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.068155] env[69982]: DEBUG oslo_vmware.api [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Task: {'id': task-3864538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103028} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.069050] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.069245] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.069422] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.069593] env[69982]: INFO nova.compute.manager [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Took 1.09 seconds to destroy the instance on the hypervisor. 
[ 820.069828] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.070026] env[69982]: DEBUG nova.compute.manager [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 820.070155] env[69982]: DEBUG nova.network.neutron [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 820.090107] env[69982]: DEBUG nova.network.neutron [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 820.198322] env[69982]: DEBUG nova.network.neutron [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating instance_info_cache with network_info: [{"id": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "address": "fa:16:3e:19:b1:b8", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b2905f3-5b", "ovs_interfaceid": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.293775] env[69982]: DEBUG nova.compute.manager [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Received event network-vif-plugged-9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.294072] env[69982]: DEBUG oslo_concurrency.lockutils [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.295522] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 820.295522] env[69982]: DEBUG oslo_concurrency.lockutils [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.295522] env[69982]: DEBUG nova.compute.manager [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] No waiting events found dispatching network-vif-plugged-9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 820.295522] env[69982]: WARNING nova.compute.manager [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Received unexpected event network-vif-plugged-9b2905f3-5b4e-48cf-afd1-247e80a04280 for instance with vm_state building and task_state spawning. [ 820.295522] env[69982]: DEBUG nova.compute.manager [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Received event network-changed-9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 820.295760] env[69982]: DEBUG nova.compute.manager [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Refreshing instance network info cache due to event network-changed-9b2905f3-5b4e-48cf-afd1-247e80a04280. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 820.295760] env[69982]: DEBUG oslo_concurrency.lockutils [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Acquiring lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.315310] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0ead81e3-5e7e-47a2-8ad7-b5fc1546362d tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.534s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.323633] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.326486] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 820.326921] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 820.401312] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864539, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.560649] env[69982]: DEBUG nova.scheduler.client.report [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 820.592510] env[69982]: DEBUG nova.network.neutron [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.700967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.701480] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Instance network_info: |[{"id": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "address": "fa:16:3e:19:b1:b8", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b2905f3-5b", "ovs_interfaceid": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 820.701806] env[69982]: DEBUG oslo_concurrency.lockutils [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Acquired lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.702041] env[69982]: DEBUG nova.network.neutron [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 
9a1de08e-3206-44cc-8d34-a5527faf9684] Refreshing network info cache for port 9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 820.703837] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:b1:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9b2905f3-5b4e-48cf-afd1-247e80a04280', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 820.713079] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Creating folder: Project (38945e679e1c4550b82ada82b9b1b7ec). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.716322] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b91cd3dd-0d5b-4db8-8357-e5780cce3ed4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.727561] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Created folder: Project (38945e679e1c4550b82ada82b9b1b7ec) in parent group-v767796. [ 820.727978] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Creating folder: Instances. Parent ref: group-v767932. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 820.728110] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d85095f-3bdc-453b-a6c5-189b7599bbc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.737533] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Created folder: Instances in parent group-v767932. [ 820.737738] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 820.737937] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 820.738157] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f419f58-cab3-4023-88e9-5cc8144218c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.753190] env[69982]: INFO nova.compute.manager [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Rescuing [ 820.753607] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.753784] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.754192] env[69982]: DEBUG nova.network.neutron [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 820.761857] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 820.761857] env[69982]: value = "task-3864542" [ 820.761857] env[69982]: _type = "Task" [ 820.761857] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.774417] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864542, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.825633] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.830114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.905898] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864539, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.918048] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.952585] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 820.952910] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.953050] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 820.953668] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.953668] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 820.953668] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 820.953852] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 820.953987] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 
tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 820.954199] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 820.954383] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 820.954570] env[69982]: DEBUG nova.virt.hardware [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 820.955573] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200396d6-25f9-474e-94a9-f1d6d4db38e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.967918] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52f2df5-873d-41c2-8e72-07a3c380937d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.996544] env[69982]: DEBUG nova.network.neutron [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updated VIF entry in instance network info cache for port 9b2905f3-5b4e-48cf-afd1-247e80a04280. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 820.996961] env[69982]: DEBUG nova.network.neutron [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating instance_info_cache with network_info: [{"id": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "address": "fa:16:3e:19:b1:b8", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b2905f3-5b", "ovs_interfaceid": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.068753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.174s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.071258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.926s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.072850] env[69982]: INFO nova.compute.claims [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.090633] env[69982]: INFO nova.scheduler.client.report [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleted allocations for instance 48162423-a117-437e-b171-9a40c7c6f49b [ 821.094800] env[69982]: INFO nova.compute.manager [-] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Took 1.02 seconds to deallocate network for instance. [ 821.273914] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864542, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.348205] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.402247] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864539, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.419613] env[69982]: DEBUG nova.compute.manager [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Received event network-vif-plugged-82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.419728] env[69982]: DEBUG oslo_concurrency.lockutils [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] Acquiring lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.421921] env[69982]: DEBUG oslo_concurrency.lockutils [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.421921] env[69982]: DEBUG oslo_concurrency.lockutils [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.421921] env[69982]: DEBUG nova.compute.manager [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] No waiting events found dispatching network-vif-plugged-82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.421921] env[69982]: WARNING nova.compute.manager [req-d43f492a-ecd3-4036-867a-eb0ece71919c req-00bf16b4-3c94-4424-a9fc-39825f502406 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Received unexpected event network-vif-plugged-82903e23-208f-46d2-81eb-689af6de8c42 for instance with vm_state building and task_state spawning. 
[ 821.500471] env[69982]: DEBUG oslo_concurrency.lockutils [req-034111f2-1c94-42fd-a7ab-99d0a4f2d636 req-786725a9-af76-41ad-a062-31f420ea3562 service nova] Releasing lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.513258] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Successfully updated port: 82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.516401] env[69982]: DEBUG nova.network.neutron [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updating instance_info_cache with network_info: [{"id": "e56d9c29-71a4-4d37-b444-7c544cd50695", "address": "fa:16:3e:c7:5f:e0", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape56d9c29-71", "ovs_interfaceid": "e56d9c29-71a4-4d37-b444-7c544cd50695", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.597818] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3694b937-5e3d-4684-9083-bf11bce8cffd tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "48162423-a117-437e-b171-9a40c7c6f49b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.218s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.602017] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.774284] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864542, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.900813] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864539, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.558863} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.901646] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 821.901646] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 821.901646] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fce73d0d-9e76-4e08-887a-839ada280237 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.909092] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 821.909092] env[69982]: value = "task-3864543" [ 821.909092] env[69982]: _type = "Task" [ 821.909092] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.918863] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864543, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.017852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "refresh_cache-a4064177-051b-4ec8-a1fc-fa5d299add8b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.020242] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.020412] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.020554] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 822.277346] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864542, 'name': CreateVM_Task, 'duration_secs': 1.373601} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.277346] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.277833] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.278014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.278408] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.278658] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09618370-5d4c-4802-b7b3-76a3cbe79b2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.285237] 
env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 822.285237] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a541b-0ec1-e9ea-329b-45587fe3aa68" [ 822.285237] env[69982]: _type = "Task" [ 822.285237] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.294977] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a541b-0ec1-e9ea-329b-45587fe3aa68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.421767] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069543} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.422071] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.422910] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e8b5bc-961d-4fe7-b151-7d5fc224f040 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.447222] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.450140] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fab35488-60b7-4070-ba57-babc0645b2a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.471516] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 822.471516] env[69982]: value = "task-3864544" [ 822.471516] env[69982]: _type = "Task" [ 822.471516] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.482311] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864544, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.612924] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4327fe-4811-46c0-8ea4-4c6aadbe4e6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.621222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293284d6-798e-4f9c-b573-a4cc5f3dd6f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.658072] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 822.659429] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756910a1-b7c1-4c85-b4bf-894553c786e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.668336] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e130ce8a-7ba9-4f52-ba63-59cdff996f5b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.684422] env[69982]: DEBUG nova.compute.provider_tree [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 822.796806] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a541b-0ec1-e9ea-329b-45587fe3aa68, 'name': SearchDatastore_Task, 'duration_secs': 0.010822} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.797198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.797444] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.797677] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.797827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.798015] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.798303] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abc518ae-538b-4c9e-b991-eb9ee1de0bbe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.807543] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.807730] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Folder [datastore1] devstack-image-cache_base created. 
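[Editor's note, illustrative aside, not part of the captured log: the "Acquiring lock" / "Acquired lock" / "Releasing lock" messages above around the cached image VMDK are emitted by oslo_concurrency.lockutils. A minimal sketch of that pattern follows; the lock name is hypothetical and stands in for the datastore path Nova derives from the image-cache entry.]

    # Sketch only: serialize work on a cached image VMDK, as the log above shows.
    # Entering and leaving the context manager produces the "Acquiring lock ...",
    # "Acquired lock ..." and "Releasing lock ..." DEBUG lines.
    from oslo_concurrency import lockutils

    CACHE_LOCK = "[datastore1] devstack-image-cache_base/<image-id>.vmdk"  # hypothetical name

    def work_on_cached_image():
        with lockutils.lock(CACHE_LOCK):
            # Critical section: only one worker in this process searches,
            # copies or extends this cached VMDK at a time.
            pass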
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.808523] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-654122b9-de6d-4640-a017-6fe412020097 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.814177] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 822.814177] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525634a8-7de8-e747-17e8-020bf6604c20" [ 822.814177] env[69982]: _type = "Task" [ 822.814177] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.822542] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525634a8-7de8-e747-17e8-020bf6604c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.880013] env[69982]: DEBUG nova.network.neutron [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Updating instance_info_cache with network_info: [{"id": "82903e23-208f-46d2-81eb-689af6de8c42", "address": "fa:16:3e:10:4c:52", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82903e23-20", "ovs_interfaceid": "82903e23-208f-46d2-81eb-689af6de8c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.982029] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864544, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.190700] env[69982]: DEBUG nova.scheduler.client.report [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.327751] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525634a8-7de8-e747-17e8-020bf6604c20, 'name': SearchDatastore_Task, 'duration_secs': 0.009317} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.328462] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e1ae130-af58-4bd0-ac6d-2b86369e6972 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.336044] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 823.336044] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0cea7-3244-a44d-4858-91febbae6d6a" [ 823.336044] env[69982]: _type = "Task" [ 823.336044] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.345048] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0cea7-3244-a44d-4858-91febbae6d6a, 'name': SearchDatastore_Task} progress is 0%. 
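[Editor's note, illustrative aside, not part of the captured log: the inventory dict reported above for provider 206a5498-2e79-46c1-a636-9488a05fb67d is what placement uses to bound new allocations. Assuming the standard placement capacity rule, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation, the logged numbers work out as below.]

    # Back-of-the-envelope from the inventory data logged above (values copied
    # from the log; the capacity formula is the standard placement rule and is
    # an editorial assumption about how these numbers are consumed).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 45},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
    # VCPU: capacity=192, MEMORY_MB: capacity=196078, DISK_GB: capacity=400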
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.382826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.383192] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance network_info: |[{"id": "82903e23-208f-46d2-81eb-689af6de8c42", "address": "fa:16:3e:10:4c:52", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82903e23-20", "ovs_interfaceid": "82903e23-208f-46d2-81eb-689af6de8c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.383629] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:4c:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31a7f15-a808-4199-9071-31fd05e316ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82903e23-208f-46d2-81eb-689af6de8c42', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.392519] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.392807] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.393089] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c856ace6-bb8f-433e-85d1-fc3983f01cce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.417122] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.417122] env[69982]: value = "task-3864545" [ 823.417122] env[69982]: _type = "Task" [ 823.417122] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.427564] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864545, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.452498] env[69982]: DEBUG nova.compute.manager [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Received event network-changed-82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.452700] env[69982]: DEBUG nova.compute.manager [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Refreshing instance network info cache due to event network-changed-82903e23-208f-46d2-81eb-689af6de8c42. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.452925] env[69982]: DEBUG oslo_concurrency.lockutils [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] Acquiring lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.453091] env[69982]: DEBUG oslo_concurrency.lockutils [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] Acquired lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.453303] env[69982]: DEBUG nova.network.neutron [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Refreshing network info cache for port 82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 823.483905] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864544, 'name': ReconfigVM_Task, 'duration_secs': 0.9097} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.484339] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Reconfigured VM instance instance-0000002e to attach disk [datastore1] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.484920] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c0ec6f2-449d-4383-a0f1-cd22a60ebfd9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.492206] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 823.492206] env[69982]: value = "task-3864546" [ 823.492206] env[69982]: _type = "Task" [ 823.492206] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.501510] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864546, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.560206] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.560658] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b89b15c6-4aff-4466-90fe-41bdb63dd19d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.569173] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 823.569173] env[69982]: value = "task-3864547" [ 823.569173] env[69982]: _type = "Task" [ 823.569173] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.578060] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864547, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.697036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.697549] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 823.701750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.858s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.848447] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0cea7-3244-a44d-4858-91febbae6d6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010104} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.848629] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.848891] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9a1de08e-3206-44cc-8d34-a5527faf9684/9a1de08e-3206-44cc-8d34-a5527faf9684.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.849195] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b80d0f85-08f7-4bca-bb92-369207b68f39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.857882] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 823.857882] env[69982]: value = "task-3864548" [ 823.857882] env[69982]: _type = "Task" [ 823.857882] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.868510] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864548, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.918626] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.918905] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.919160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.919415] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.919529] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.925828] env[69982]: INFO nova.compute.manager [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Terminating instance [ 823.936945] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864545, 'name': CreateVM_Task, 'duration_secs': 0.304996} completed successfully. 
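[Editor's note, illustrative aside, not part of the captured log: the repeated "Waiting for the task: (returnval){...}" and "progress is N%" pairs above come from oslo_vmware.api.VMwareAPISession.wait_for_task(), which polls a vCenter task until it succeeds or raises. A minimal sketch of the calling pattern follows; the session construction is left commented out because it needs a live vCenter, and its endpoint and credentials are hypothetical.]

    # Sketch of the task-polling pattern behind the wait_for_task / _poll_task
    # lines above.
    from oslo_vmware import api

    def power_off(session, vm_ref):
        """Power off a VM and block until the vCenter task finishes.

        `session` is an oslo_vmware.api.VMwareAPISession and `vm_ref` is the
        VM's managed object reference, both obtained elsewhere (hypothetical).
        """
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, logging "progress is N%" on each poll,
        # and returns the task info once it reaches the 'success' state (it
        # raises if the task errors out or is cancelled).
        return session.wait_for_task(task)

    # session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)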
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.937294] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.938122] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.938302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.938660] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.939351] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9499cb2d-9097-4237-8bbe-32588ab905c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.945624] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 823.945624] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a352e0-dd48-9bde-86ce-820a9f3cda8a" [ 823.945624] env[69982]: _type = "Task" [ 823.945624] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.960159] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a352e0-dd48-9bde-86ce-820a9f3cda8a, 'name': SearchDatastore_Task, 'duration_secs': 0.009643} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.960159] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.960159] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 823.960606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.960606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.960729] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 823.961375] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02949a78-93b3-44a6-855a-e1e09b00a50f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.970570] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 823.970786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 823.971924] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d09af3f3-1529-49e0-a3e7-f60f865e74f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.979032] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 823.979032] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522a8865-e028-b655-5bfc-93ead72396eb" [ 823.979032] env[69982]: _type = "Task" [ 823.979032] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.987413] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522a8865-e028-b655-5bfc-93ead72396eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.003669] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864546, 'name': Rename_Task, 'duration_secs': 0.146195} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.003961] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 824.004240] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc8eb889-63d2-4b97-9861-6ba1d54cc507 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.012069] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 824.012069] env[69982]: value = "task-3864549" [ 824.012069] env[69982]: _type = "Task" [ 824.012069] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.021637] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.083024] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864547, 'name': PowerOffVM_Task, 'duration_secs': 0.177768} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.083307] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.084183] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab74d23-713f-4e09-b9eb-e7aae3f88022 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.104215] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7832b7db-5682-4fb7-8069-355e3f1ad172 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.151887] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.152280] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f612bf8-0421-4e09-86bb-bb4f4a87aa86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.163541] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 824.163541] env[69982]: value = "task-3864550" [ 824.163541] env[69982]: _type = "Task" [ 824.163541] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.176870] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 824.176870] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.176870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.176870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.176870] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.177261] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9bd764f0-c497-4999-91af-ebcd7a8cfb6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.188282] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.189063] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.189460] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4cf116f-9c72-4941-8151-5a9ae7234d0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.201247] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 824.201247] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522bbfea-2c0e-cacb-661a-cba3abef4074" [ 824.201247] env[69982]: _type = "Task" [ 824.201247] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.207442] env[69982]: INFO nova.compute.claims [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.213538] env[69982]: DEBUG nova.compute.utils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 824.214809] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.214988] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.220546] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522bbfea-2c0e-cacb-661a-cba3abef4074, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.282499] env[69982]: DEBUG nova.policy [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '005bbd5e6a314bf48f443ddc050f0a75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18dcc70df5e144e3b4f0592b0112aaf7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.294196] env[69982]: DEBUG nova.network.neutron [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Updated VIF entry in instance network info cache for port 82903e23-208f-46d2-81eb-689af6de8c42. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 824.294196] env[69982]: DEBUG nova.network.neutron [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Updating instance_info_cache with network_info: [{"id": "82903e23-208f-46d2-81eb-689af6de8c42", "address": "fa:16:3e:10:4c:52", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82903e23-20", "ovs_interfaceid": "82903e23-208f-46d2-81eb-689af6de8c42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.369792] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864548, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479457} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.370075] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9a1de08e-3206-44cc-8d34-a5527faf9684/9a1de08e-3206-44cc-8d34-a5527faf9684.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.370393] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.370755] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d05f774-d71a-4a98-95b3-843e29547311 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.378506] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 824.378506] env[69982]: value = "task-3864551" [ 824.378506] env[69982]: _type = "Task" [ 824.378506] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.389503] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864551, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.438029] env[69982]: DEBUG nova.compute.manager [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 824.438029] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 824.439579] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a4ad6a-801d-436c-af48-3e0b02a75544 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.447390] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 824.447604] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18580de8-f94c-49e6-928d-54ce7397aeb1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.455078] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 824.455078] env[69982]: value = "task-3864552" [ 824.455078] env[69982]: _type = "Task" [ 824.455078] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.465135] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864552, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.489103] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522a8865-e028-b655-5bfc-93ead72396eb, 'name': SearchDatastore_Task, 'duration_secs': 0.008715} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.490702] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b93afb08-bddf-4839-9e11-7e42fc0f6720 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.495728] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 824.495728] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab923b-51d7-1c30-eb73-69eadff767f6" [ 824.495728] env[69982]: _type = "Task" [ 824.495728] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.506256] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab923b-51d7-1c30-eb73-69eadff767f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.522343] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864549, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.657831] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Successfully created port: c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.712440] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522bbfea-2c0e-cacb-661a-cba3abef4074, 'name': SearchDatastore_Task, 'duration_secs': 0.025285} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.713472] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5fc69fd-0432-4cf8-a94a-d75c707c3cde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.717456] env[69982]: INFO nova.compute.resource_tracker [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating resource usage from migration 535190ea-b5b3-4b17-a596-f0eafa3d66dd [ 824.720548] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 824.726864] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 824.726864] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52670d24-18b2-1e2f-0a64-3b2a88074cd5" [ 824.726864] env[69982]: _type = "Task" [ 824.726864] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.736451] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52670d24-18b2-1e2f-0a64-3b2a88074cd5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.798073] env[69982]: DEBUG oslo_concurrency.lockutils [req-80fe7fd7-56ac-45ec-9722-073709b9d3d7 req-c243c3fd-e2e9-4bef-a1f1-77d8df708871 service nova] Releasing lock "refresh_cache-c563267f-7699-4bd1-83cf-59ecef500ac3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.891453] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864551, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065303} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.891889] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.892987] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a62a955-fa3d-4762-8b41-8608b55ba2db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.921419] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 9a1de08e-3206-44cc-8d34-a5527faf9684/9a1de08e-3206-44cc-8d34-a5527faf9684.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.924505] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44cd7254-1a6a-4950-aba6-7de27431046e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.946238] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 824.946238] env[69982]: value = "task-3864553" [ 824.946238] env[69982]: _type = "Task" [ 824.946238] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.958692] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864553, 'name': ReconfigVM_Task} progress is 10%. 
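[Editor's note, illustrative aside, not part of the captured log: task-3864548, task-3864551 and task-3864553 above form the usual spawn-from-cache sequence for instance 9a1de08e-3206-44cc-8d34-a5527faf9684: copy the cached image VMDK into the instance directory, extend it to the flavor's root size, then reconfigure the VM to attach it. A rough sketch of the first two steps, expressed directly against the vSphere VirtualDiskManager through the same session API, follows; `session`, `dc_ref` and the datastore paths are placeholders, not values taken from the log.]

    # Rough sketch of the CopyVirtualDisk_Task -> ExtendVirtualDisk_Task sequence
    # seen above. All names and paths here are placeholders.
    def copy_and_extend_root_disk(session, dc_ref):
        disk_mgr = session.vim.service_content.virtualDiskManager

        src = '[datastore1] devstack-image-cache_base/<image-id>.vmdk'
        dst = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

        copy_task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName=src, sourceDatacenter=dc_ref,
            destName=dst, destDatacenter=dc_ref)
        session.wait_for_task(copy_task)      # corresponds to task-3864548

        extend_task = session.invoke_api(
            session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
            name=dst, datacenter=dc_ref,
            newCapacityKb=1048576,            # "Extending root virtual disk to 1048576"
            eagerZero=False)
        session.wait_for_task(extend_task)    # corresponds to task-3864551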
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.971410] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864552, 'name': PowerOffVM_Task, 'duration_secs': 0.2849} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.971723] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 824.971946] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 824.972367] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18cd60f6-f111-4acd-97f5-4961608044d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.009041] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab923b-51d7-1c30-eb73-69eadff767f6, 'name': SearchDatastore_Task, 'duration_secs': 0.010174} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.011984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.012800] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] c563267f-7699-4bd1-83cf-59ecef500ac3/c563267f-7699-4bd1-83cf-59ecef500ac3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.014066] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8c230d66-afcd-49e8-9408-677e46287c45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.023865] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 825.023865] env[69982]: value = "task-3864555" [ 825.023865] env[69982]: _type = "Task" [ 825.023865] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.029887] env[69982]: DEBUG oslo_vmware.api [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864549, 'name': PowerOnVM_Task, 'duration_secs': 0.628226} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.033475] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 825.033726] env[69982]: INFO nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Took 9.57 seconds to spawn the instance on the hypervisor. [ 825.033917] env[69982]: DEBUG nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.034732] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6cb5e8-b814-4e93-b48f-6785355db530 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.044606] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864555, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.050095] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 825.050652] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 825.050858] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleting the datastore file [datastore2] 2d554902-bf28-4ee2-b9d6-4219e54246fc {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 825.055123] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ea10ce7-9e6f-4a13-ae5d-1e14efa75faf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.061991] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for the task: (returnval){ [ 825.061991] env[69982]: value = "task-3864556" [ 825.061991] env[69982]: _type = "Task" [ 825.061991] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.073133] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864556, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.244052] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52670d24-18b2-1e2f-0a64-3b2a88074cd5, 'name': SearchDatastore_Task, 'duration_secs': 0.011375} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.244362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.244633] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. {{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 825.244921] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8db8a923-c828-47a7-b93b-00ae19246dcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.257918] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 825.257918] env[69982]: value = "task-3864557" [ 825.257918] env[69982]: _type = "Task" [ 825.257918] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.268744] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.336030] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c073cb2-1915-4efc-a841-9dfe4e116c8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.345724] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7ba416-c62d-4962-b0c6-3570b6fbe136 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.383523] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f43ccc-acbb-43e7-a158-3b50ad9877dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.395794] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943f531f-9018-4d0e-a3eb-4ab45827e9da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.417637] env[69982]: DEBUG nova.compute.provider_tree [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.457668] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864553, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.539297] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864555, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.566761] env[69982]: INFO nova.compute.manager [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Took 47.56 seconds to build instance. [ 825.573560] env[69982]: DEBUG oslo_vmware.api [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Task: {'id': task-3864556, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209791} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.573685] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 825.573812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 825.573986] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 825.574208] env[69982]: INFO nova.compute.manager [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 825.574403] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.574992] env[69982]: DEBUG nova.compute.manager [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 825.574992] env[69982]: DEBUG nova.network.neutron [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 825.733780] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.773737] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864557, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.776835] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.777151] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.777321] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.777850] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.778033] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.778192] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.778446] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.778623] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.778815] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.779027] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.779216] env[69982]: DEBUG nova.virt.hardware [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.780189] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be2285b-2590-41fe-b903-ecfa6ba0a153 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.791336] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a627ca-5ab4-43d8-ab9d-86764969f0af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.922608] env[69982]: DEBUG nova.scheduler.client.report [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.956511] env[69982]: DEBUG nova.compute.manager [req-95fe7c46-2568-4f3a-885b-37aa9f5a19db req-cacfab59-5e4d-4dd8-9702-0d85476e8163 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Received event network-vif-deleted-d73ed1df-de67-4db7-82b5-7f693bab3f55 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.956817] env[69982]: INFO nova.compute.manager [req-95fe7c46-2568-4f3a-885b-37aa9f5a19db req-cacfab59-5e4d-4dd8-9702-0d85476e8163 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Neutron deleted interface d73ed1df-de67-4db7-82b5-7f693bab3f55; detaching it from the instance and deleting it from the info cache [ 825.957014] env[69982]: DEBUG nova.network.neutron [req-95fe7c46-2568-4f3a-885b-37aa9f5a19db req-cacfab59-5e4d-4dd8-9702-0d85476e8163 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.962328] env[69982]: DEBUG oslo_vmware.api 
[None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864553, 'name': ReconfigVM_Task, 'duration_secs': 0.536746} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.962923] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 9a1de08e-3206-44cc-8d34-a5527faf9684/9a1de08e-3206-44cc-8d34-a5527faf9684.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.963656] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e47b677-2c00-4418-bdd3-7379b6592dec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.971284] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 825.971284] env[69982]: value = "task-3864558" [ 825.971284] env[69982]: _type = "Task" [ 825.971284] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.981693] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864558, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.040886] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521667} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.041232] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] c563267f-7699-4bd1-83cf-59ecef500ac3/c563267f-7699-4bd1-83cf-59ecef500ac3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.041427] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.041686] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-89afe547-8f7e-4c80-9c63-30cd350e01ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.053022] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 826.053022] env[69982]: value = "task-3864559" [ 826.053022] env[69982]: _type = "Task" [ 826.053022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.063488] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864559, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.069611] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6b484b92-cb86-4b73-8c13-b08fbcd2c42a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.595s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.268817] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698191} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.269147] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. 
[ 826.269947] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a356712b-7a5e-4864-bea0-5c3f22991adb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.309684] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.309684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de41c4da-673e-49f4-8cf6-a4e2b0ddc82e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.329107] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 826.329107] env[69982]: value = "task-3864560" [ 826.329107] env[69982]: _type = "Task" [ 826.329107] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.338525] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864560, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.370388] env[69982]: DEBUG nova.compute.manager [req-581d06c0-52cf-4aca-9c98-b767b8599728 req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Received event network-vif-plugged-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 826.371965] env[69982]: DEBUG oslo_concurrency.lockutils [req-581d06c0-52cf-4aca-9c98-b767b8599728 req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] Acquiring lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.371965] env[69982]: DEBUG oslo_concurrency.lockutils [req-581d06c0-52cf-4aca-9c98-b767b8599728 req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.371965] env[69982]: DEBUG oslo_concurrency.lockutils [req-581d06c0-52cf-4aca-9c98-b767b8599728 req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.371965] env[69982]: DEBUG nova.compute.manager [req-581d06c0-52cf-4aca-9c98-b767b8599728 
req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] No waiting events found dispatching network-vif-plugged-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.371965] env[69982]: WARNING nova.compute.manager [req-581d06c0-52cf-4aca-9c98-b767b8599728 req-d46f4e01-e56b-4239-b29d-8030987a4173 service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Received unexpected event network-vif-plugged-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc for instance with vm_state building and task_state spawning. [ 826.408735] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Successfully updated port: c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 826.429763] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.728s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.430025] env[69982]: INFO nova.compute.manager [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Migrating [ 826.440346] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 32.947s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.440549] env[69982]: DEBUG nova.objects.instance [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 826.443697] env[69982]: DEBUG nova.network.neutron [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.463502] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6adf2161-9f5c-46ee-b2b7-751291a3a24e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.479958] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3222ba-73c9-4a2d-92c5-d30fab1fb998 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.500994] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864558, 
'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.521251] env[69982]: DEBUG nova.compute.manager [req-95fe7c46-2568-4f3a-885b-37aa9f5a19db req-cacfab59-5e4d-4dd8-9702-0d85476e8163 service nova] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Detach interface failed, port_id=d73ed1df-de67-4db7-82b5-7f693bab3f55, reason: Instance 2d554902-bf28-4ee2-b9d6-4219e54246fc could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 826.562046] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864559, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069672} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.562300] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.563245] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f0ced4-f3ed-499f-ab3e-158ce913da0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.577819] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.589048] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] c563267f-7699-4bd1-83cf-59ecef500ac3/c563267f-7699-4bd1-83cf-59ecef500ac3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.589600] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96ace2a7-648d-485d-adb9-82b2f073af11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.612356] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 826.612356] env[69982]: value = "task-3864561" [ 826.612356] env[69982]: _type = "Task" [ 826.612356] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.621681] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864561, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.840582] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864560, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.913300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.913300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.913300] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 826.928159] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.928543] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.928744] env[69982]: DEBUG nova.compute.manager [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 826.929710] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82502b0d-34d4-4986-9a0d-104a16c75f80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.939101] env[69982]: DEBUG nova.compute.manager [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) 
do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 826.939934] env[69982]: DEBUG nova.objects.instance [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'flavor' on Instance uuid a70fa652-4726-4bc2-966f-530aaa79ba86 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.949905] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.950185] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.950453] env[69982]: DEBUG nova.network.neutron [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 826.952111] env[69982]: INFO nova.compute.manager [-] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Took 1.38 seconds to deallocate network for instance. [ 826.985945] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864558, 'name': Rename_Task, 'duration_secs': 0.581321} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.986358] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.986701] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e975dbfa-5eab-4f66-89d5-6de035ffe23e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.995578] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 826.995578] env[69982]: value = "task-3864562" [ 826.995578] env[69982]: _type = "Task" [ 826.995578] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.012315] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864562, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.113298] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.124756] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864561, 'name': ReconfigVM_Task, 'duration_secs': 0.307791} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.125097] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Reconfigured VM instance instance-00000030 to attach disk [datastore1] c563267f-7699-4bd1-83cf-59ecef500ac3/c563267f-7699-4bd1-83cf-59ecef500ac3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.125809] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f595772-3a3f-4b35-b89b-fc7145d83727 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.134159] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 827.134159] env[69982]: value = "task-3864563" [ 827.134159] env[69982]: _type = "Task" [ 827.134159] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.148210] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864563, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.342450] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864560, 'name': ReconfigVM_Task, 'duration_secs': 0.994397} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.342789] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Reconfigured VM instance instance-0000002d to attach disk [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.343846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f0dbbc-0d13-4aae-aa9d-29cf62020cb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.372978] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c58a14f-0113-4a96-8ea7-296e50d7a58e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.393526] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 827.393526] env[69982]: value = "task-3864564" [ 827.393526] env[69982]: _type = "Task" [ 827.393526] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.404246] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864564, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.460498] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f12989d-4cb6-4b24-a5a0-cff1a7750cdc tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.465140] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.352s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.467417] env[69982]: INFO nova.compute.claims [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.472211] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 827.476407] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.511012] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864562, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.645204] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864563, 'name': Rename_Task, 'duration_secs': 0.203174} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.645433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.645687] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a59ff182-0c33-4d42-9125-747d391c1d39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.652664] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 827.652664] env[69982]: value = "task-3864565" [ 827.652664] env[69982]: _type = "Task" [ 827.652664] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.662528] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864565, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.677143] env[69982]: DEBUG nova.network.neutron [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Updating instance_info_cache with network_info: [{"id": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "address": "fa:16:3e:b5:38:cf", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc905d5aa-25", "ovs_interfaceid": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.753464] env[69982]: DEBUG nova.network.neutron [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.903804] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864564, 'name': ReconfigVM_Task, 'duration_secs': 0.208873} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.905196] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 827.905196] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61cbed0c-c796-44cf-a408-c3ed37e56b8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.911471] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 827.911471] env[69982]: value = "task-3864566" [ 827.911471] env[69982]: _type = "Task" [ 827.911471] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.919535] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864566, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.951219] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 827.951599] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce3acca1-d039-4df3-ba45-e06a6a26c613 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.960377] env[69982]: DEBUG oslo_vmware.api [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 827.960377] env[69982]: value = "task-3864567" [ 827.960377] env[69982]: _type = "Task" [ 827.960377] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.969911] env[69982]: DEBUG oslo_vmware.api [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.009972] env[69982]: DEBUG oslo_vmware.api [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864562, 'name': PowerOnVM_Task, 'duration_secs': 0.517085} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.010279] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.010513] env[69982]: INFO nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Took 9.82 seconds to spawn the instance on the hypervisor. [ 828.010751] env[69982]: DEBUG nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.011658] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a125f1a0-18c3-42e8-b22d-f0704d940c80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.166889] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864565, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.179696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.181163] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Instance network_info: |[{"id": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "address": "fa:16:3e:b5:38:cf", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc905d5aa-25", "ovs_interfaceid": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 828.181398] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:38:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '767a3a48-41d4-4a0c-961d-0024837f63bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c905d5aa-25d9-454f-8fbf-4f3622b4d9fc', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 828.188990] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Creating folder: Project (18dcc70df5e144e3b4f0592b0112aaf7). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.189355] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d3f503ca-9e70-4577-b25c-e38d7013afe3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.202411] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Created folder: Project (18dcc70df5e144e3b4f0592b0112aaf7) in parent group-v767796. [ 828.202669] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Creating folder: Instances. Parent ref: group-v767936. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 828.202900] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fa2b7d4-aebf-4a9b-b742-d3a4e623d608 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.216025] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Created folder: Instances in parent group-v767936. [ 828.216339] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.216776] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 828.216888] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac1595ee-544f-47f8-9c1b-7be0f9cd0028 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.242969] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 828.242969] env[69982]: value = "task-3864570" [ 828.242969] env[69982]: _type = "Task" [ 828.242969] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.256499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.259475] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864570, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.404150] env[69982]: DEBUG nova.compute.manager [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Received event network-changed-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.404447] env[69982]: DEBUG nova.compute.manager [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Refreshing instance network info cache due to event network-changed-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.404683] env[69982]: DEBUG oslo_concurrency.lockutils [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] Acquiring lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.404944] env[69982]: DEBUG oslo_concurrency.lockutils [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] Acquired lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.405312] env[69982]: DEBUG nova.network.neutron [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Refreshing network info cache for port c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 828.426904] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864566, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.471977] env[69982]: DEBUG oslo_vmware.api [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864567, 'name': PowerOffVM_Task, 'duration_secs': 0.302197} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.472338] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.472645] env[69982]: DEBUG nova.compute.manager [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.473710] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e698cb2-368f-4a77-9376-db817797bfe5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.534572] env[69982]: INFO nova.compute.manager [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Took 47.13 seconds to build instance. [ 828.665464] env[69982]: DEBUG oslo_vmware.api [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864565, 'name': PowerOnVM_Task, 'duration_secs': 0.842713} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.669099] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.669532] env[69982]: INFO nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Took 7.75 seconds to spawn the instance on the hypervisor. [ 828.669901] env[69982]: DEBUG nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.671790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12336582-aa5a-4157-a493-9e0c2f19839d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.755029] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864570, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.925183] env[69982]: DEBUG oslo_vmware.api [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864566, 'name': PowerOnVM_Task, 'duration_secs': 0.580119} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.925183] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.929688] env[69982]: DEBUG nova.compute.manager [None req-5072e1b4-9ede-4f3e-88f4-2d04fbc6d223 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.930782] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2020ab3b-dc2b-41d8-8734-8c691e7db363 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.992479] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e5801db0-136a-4f84-aea0-4d52f28f6bcd tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.064s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.037083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8dc13d2f-8e94-4f6f-ae53-0f4f659fe669 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.791s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.104529] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9cf184-484f-4cbe-8275-d3452eb7d1b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.113333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-946b4f42-a31e-45da-aaa0-ed5d54c1ec75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.149349] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad842801-821b-4fdc-81ff-d6ffb34a9210 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.159530] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d3196b-4064-4e9a-b4d2-27c352a9456c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.183693] env[69982]: DEBUG nova.compute.provider_tree [None 
req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.197220] env[69982]: INFO nova.compute.manager [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Took 43.67 seconds to build instance. [ 829.254166] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864570, 'name': CreateVM_Task, 'duration_secs': 0.76805} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.254340] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.255066] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.255286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.255713] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 829.255830] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-656239fe-0fef-44d2-8ccd-cac66c79fe07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.263895] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 829.263895] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274918a-2ff0-70de-5f6b-38dbce1679f5" [ 829.263895] env[69982]: _type = "Task" [ 829.263895] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.274498] env[69982]: DEBUG nova.network.neutron [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Updated VIF entry in instance network info cache for port c905d5aa-25d9-454f-8fbf-4f3622b4d9fc. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 829.275039] env[69982]: DEBUG nova.network.neutron [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Updating instance_info_cache with network_info: [{"id": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "address": "fa:16:3e:b5:38:cf", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc905d5aa-25", "ovs_interfaceid": "c905d5aa-25d9-454f-8fbf-4f3622b4d9fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.281494] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274918a-2ff0-70de-5f6b-38dbce1679f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.542272] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 829.686669] env[69982]: DEBUG nova.scheduler.client.report [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 829.701281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94cea0d3-fd6e-4eab-b0f6-656f20339882 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.280s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.779215] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5274918a-2ff0-70de-5f6b-38dbce1679f5, 'name': SearchDatastore_Task, 'duration_secs': 0.023073} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.779524] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.779758] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 829.779993] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.780155] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
829.780339] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 829.780633] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50246ee9-a253-4270-94d8-71f20e7a74a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.783877] env[69982]: DEBUG oslo_concurrency.lockutils [req-cb0216d2-d597-40df-befc-426b4cdbf49f req-f171655c-753b-45b6-9a75-5b38080af98a service nova] Releasing lock "refresh_cache-a427dc30-7e0f-4313-a8e9-f76451e4a112" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.785548] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96b9d4e-f0fd-48e6-ac89-a6847bc78dc6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.805176] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 829.809991] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 829.811134] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 829.811222] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3f7581d-f0db-4846-9b1b-d01944b6d396 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.818164] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 829.818164] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b6811a-5671-3af9-ad21-e879c2e46576" [ 829.818164] env[69982]: _type = "Task" [ 829.818164] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.829291] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b6811a-5671-3af9-ad21-e879c2e46576, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.914538] env[69982]: INFO nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Rebuilding instance [ 829.964786] env[69982]: DEBUG nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.965990] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb1b409-8870-4b34-88df-4d0b3178dd9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.003170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4223cf43-be38-492a-9d9e-ea104f58f017 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.012834] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Suspending the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 830.013132] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-00ba6cb5-b752-476b-9395-9511eaab1b53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.021064] env[69982]: DEBUG oslo_vmware.api [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 830.021064] env[69982]: value = "task-3864571" [ 830.021064] env[69982]: _type = "Task" [ 830.021064] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.030629] env[69982]: DEBUG oslo_vmware.api [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864571, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.064314] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.192035] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.727s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.192554] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 830.195363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.675s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.197529] env[69982]: INFO nova.compute.claims [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.205036] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.314018] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.314368] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba21120a-b12c-4742-97f3-27945d162fd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.326824] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 830.326824] env[69982]: value = "task-3864572" [ 830.326824] env[69982]: _type = "Task" [ 830.326824] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.337465] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b6811a-5671-3af9-ad21-e879c2e46576, 'name': SearchDatastore_Task, 'duration_secs': 0.01171} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.339239] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d7f5881-ff46-4a9b-9fa0-45d3a6d81ff5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.344957] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.349665] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 830.349665] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238247b-6a17-c8c3-7fdd-a5ab138b3d74" [ 830.349665] env[69982]: _type = "Task" [ 830.349665] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.359480] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238247b-6a17-c8c3-7fdd-a5ab138b3d74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.534137] env[69982]: DEBUG oslo_vmware.api [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864571, 'name': SuspendVM_Task} progress is 54%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.623525] env[69982]: DEBUG nova.compute.manager [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Received event network-changed-9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.623772] env[69982]: DEBUG nova.compute.manager [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Refreshing instance network info cache due to event network-changed-9b2905f3-5b4e-48cf-afd1-247e80a04280. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.623932] env[69982]: DEBUG oslo_concurrency.lockutils [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] Acquiring lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.624149] env[69982]: DEBUG oslo_concurrency.lockutils [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] Acquired lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.624246] env[69982]: DEBUG nova.network.neutron [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Refreshing network info cache for port 9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 830.703360] env[69982]: DEBUG nova.compute.utils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 830.708119] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 830.733674] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.789831] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.790632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.790632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.790632] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.791302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.793718] env[69982]: INFO nova.compute.manager [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Terminating instance [ 830.841727] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864572, 'name': PowerOffVM_Task, 'duration_secs': 0.261305} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.842061] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.842222] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 830.860629] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238247b-6a17-c8c3-7fdd-a5ab138b3d74, 'name': SearchDatastore_Task, 'duration_secs': 0.012415} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.860995] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.861272] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a427dc30-7e0f-4313-a8e9-f76451e4a112/a427dc30-7e0f-4313-a8e9-f76451e4a112.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 830.861862] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc513adc-6224-4a72-9ed6-352fe5876750 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.869371] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 830.869371] env[69982]: value = "task-3864573" [ 830.869371] env[69982]: _type = "Task" [ 830.869371] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.878696] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864573, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.981590] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 830.981999] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7d91c40-74ce-412f-8e8c-54a66e8c95dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.989792] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 830.989792] env[69982]: value = "task-3864574" [ 830.989792] env[69982]: _type = "Task" [ 830.989792] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.002829] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a61e3d25-9064-4f18-b7f1-0045b705571a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.003102] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.003433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 831.003723] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.004449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4430d1-bae2-47af-9d99-291bc1356078 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.012786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 831.014066] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ceaf3a16-5332-449c-bf79-cff07c2d78e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.031645] env[69982]: DEBUG oslo_vmware.api [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864571, 'name': SuspendVM_Task, 'duration_secs': 0.734864} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.031941] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Suspended the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 831.032131] env[69982]: DEBUG nova.compute.manager [None req-d4cabd43-49c5-49ec-a72d-fcf6c176780b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.033064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1993afc4-55cb-4abf-80ee-4912b89a5e85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.083523] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.083838] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.084057] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore1] a70fa652-4726-4bc2-966f-530aaa79ba86 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.084382] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c599cf57-9086-4c65-9615-d37f0996d807 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.098117] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 831.098117] env[69982]: value = "task-3864576" [ 831.098117] env[69982]: _type = "Task" [ 831.098117] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.110954] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864576, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.208530] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 831.298829] env[69982]: DEBUG nova.compute.manager [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 831.298829] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.300046] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35708d9a-4494-406d-8b60-87b2fd0690f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.314430] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.314729] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13fe9c2d-ff61-47d0-b454-9089b9315877 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.327895] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 831.327895] env[69982]: value = "task-3864577" [ 831.327895] env[69982]: _type = "Task" [ 831.327895] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.351533] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 831.351809] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.351955] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 831.352497] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.352497] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 831.352497] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 831.352776] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 831.352833] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 831.353091] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 831.353278] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 831.353335] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 831.358764] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.362070] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46d1acea-66a8-44ae-878e-0f53ff6a531e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.386056] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864573, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.390636] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 831.390636] env[69982]: value = "task-3864578" [ 831.390636] env[69982]: _type = "Task" [ 831.390636] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.436230] env[69982]: DEBUG nova.network.neutron [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updated VIF entry in instance network info cache for port 9b2905f3-5b4e-48cf-afd1-247e80a04280. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 831.436843] env[69982]: DEBUG nova.network.neutron [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating instance_info_cache with network_info: [{"id": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "address": "fa:16:3e:19:b1:b8", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b2905f3-5b", "ovs_interfaceid": "9b2905f3-5b4e-48cf-afd1-247e80a04280", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.612696] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324942} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.612696] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 831.612696] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 831.612696] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 831.813732] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d9c096-b4a2-47e1-8b70-b21b6ee6756f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.822711] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02be8db3-a267-4d3d-bbfb-3cf2ec8c421d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.857429] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737ad02b-79b2-4bfe-ab33-9f2e9f76ddec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.865932] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864577, 'name': PowerOffVM_Task, 'duration_secs': 0.432378} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.868234] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.868408] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 831.868707] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-daeef7c2-a56d-4ec0-ba26-c7bdbf82c783 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.871627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b30278-d9fa-4802-8d6c-8c6eeb0f25dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.891261] env[69982]: DEBUG nova.compute.provider_tree [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.895942] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544991} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.899366] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a427dc30-7e0f-4313-a8e9-f76451e4a112/a427dc30-7e0f-4313-a8e9-f76451e4a112.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 831.899602] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 831.899857] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39043233-d15b-40f4-9460-b42d54e56447 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.907571] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864578, 'name': ReconfigVM_Task, 'duration_secs': 0.382995} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.909862] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 831.917024] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 831.917024] env[69982]: value = "task-3864580" [ 831.917024] env[69982]: _type = "Task" [ 831.917024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.923892] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864580, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.943408] env[69982]: DEBUG oslo_concurrency.lockutils [req-89bae36e-5293-4866-b158-1cb84c961a54 req-b4e79f71-e09e-426a-83f8-25ba83ed9788 service nova] Releasing lock "refresh_cache-9a1de08e-3206-44cc-8d34-a5527faf9684" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.948232] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.948705] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.949171] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleting the datastore file [datastore2] 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.949497] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b62f161-0dfd-4480-9a8a-f78421de83d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.957225] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for the task: (returnval){ [ 831.957225] env[69982]: value = "task-3864581" [ 831.957225] env[69982]: _type = "Task" [ 831.957225] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.968997] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.223703] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 832.248546] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.251893] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.252209] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.252209] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 
tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.252209] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.252209] env[69982]: DEBUG nova.virt.hardware [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.252209] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d50f88a-9fe9-4e69-81e3-ed53b0c4c944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.260535] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6e7c79-36b3-4866-a92e-098db1eedb29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.275516] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.284789] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Creating folder: Project (2ad533b5c32f481da5c44a68e2b18a94). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.284789] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-966391a2-529e-4033-b585-d2ac82c91ec0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.295082] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Created folder: Project (2ad533b5c32f481da5c44a68e2b18a94) in parent group-v767796. [ 832.295300] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Creating folder: Instances. Parent ref: group-v767939. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.295552] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6b81707-25f1-4d21-bf10-a6d9bd3f094c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.305313] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Created folder: Instances in parent group-v767939. 
[ 832.305492] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.305697] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.305929] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be69e17e-eb64-49df-b2ff-d345a9e1d51c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.322892] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.322892] env[69982]: value = "task-3864584" [ 832.322892] env[69982]: _type = "Task" [ 832.322892] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.330951] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864584, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.398354] env[69982]: DEBUG nova.scheduler.client.report [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 832.417468] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:06:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d19589d5-9552-4797-87a2-fa71245a23ed',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-913055492',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.417744] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.417932] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea 
tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.418149] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.418388] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.418559] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.418778] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.418981] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.419276] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.419362] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.419582] env[69982]: DEBUG nova.virt.hardware [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.425412] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 832.426386] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66ebc8bf-5bb0-49a3-995d-0a5ccdaea616 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.452035] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186865} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.452275] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.452658] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 832.452658] env[69982]: value = "task-3864585" [ 832.452658] env[69982]: _type = "Task" [ 832.452658] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.453846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5cd658-e9f8-4982-ba98-d34fe178caef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.485933] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] a427dc30-7e0f-4313-a8e9-f76451e4a112/a427dc30-7e0f-4313-a8e9-f76451e4a112.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.496478] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9df8ba9d-95bf-49a6-80d2-51ac38cc29da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.509560] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864585, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.514120] env[69982]: DEBUG oslo_vmware.api [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Task: {'id': task-3864581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152783} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.517124] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.517330] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 832.517515] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.517693] env[69982]: INFO nova.compute.manager [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Took 1.22 seconds to destroy the instance on the hypervisor. [ 832.517929] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.518606] env[69982]: DEBUG nova.compute.manager [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 832.518695] env[69982]: DEBUG nova.network.neutron [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 832.522796] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 832.522796] env[69982]: value = "task-3864586" [ 832.522796] env[69982]: _type = "Task" [ 832.522796] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.532493] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864586, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.650148] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 832.650482] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.650654] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 832.650840] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.650986] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 832.651150] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 832.651361] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 832.651524] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 832.651691] env[69982]: 
DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 832.651855] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 832.652037] env[69982]: DEBUG nova.virt.hardware [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 832.654125] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dbc674-4021-4829-9d9f-d28756e1dc10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.662458] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d4d603-f9db-4255-a2da-0f87276d023b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.684157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:ec:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94487a32-cb22-46ea-afe4-0dab4d8d18be', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.694482] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.694482] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.694693] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b6fda32-2915-4062-9ef5-0f1cdb659557 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.718483] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.718483] env[69982]: value = "task-3864587" [ 832.718483] env[69982]: _type = "Task" [ 832.718483] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.728115] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864587, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.834976] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864584, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.903240] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.903788] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 832.906490] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.362s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.907911] env[69982]: INFO nova.compute.claims [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 832.967331] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864585, 'name': ReconfigVM_Task, 'duration_secs': 0.219949} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.967331] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 832.968304] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21b27c5e-e7ba-4119-9f2c-bae1c5da6aae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.994695] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.995928] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ed9402d-3adc-4af2-829a-60a7b7424f50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.025658] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 833.025658] env[69982]: value = "task-3864588" [ 833.025658] env[69982]: _type = "Task" [ 833.025658] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.043462] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864586, 'name': ReconfigVM_Task, 'duration_secs': 0.338277} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.043722] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864588, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.044045] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Reconfigured VM instance instance-00000031 to attach disk [datastore2] a427dc30-7e0f-4313-a8e9-f76451e4a112/a427dc30-7e0f-4313-a8e9-f76451e4a112.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.044727] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-118c4d35-8ae8-4ced-a301-c7e0d8caa22e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.051324] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 833.051324] env[69982]: value = "task-3864589" [ 833.051324] env[69982]: _type = "Task" [ 833.051324] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.062459] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864589, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.198289] env[69982]: DEBUG nova.compute.manager [req-e1351762-6eb7-44dd-b79a-982a0f610db3 req-5f1e0bac-aabc-4d68-85a1-b049587d43f5 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Received event network-vif-deleted-dd67d1de-a902-443e-a6bb-1cc4903f5ccd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.198289] env[69982]: INFO nova.compute.manager [req-e1351762-6eb7-44dd-b79a-982a0f610db3 req-5f1e0bac-aabc-4d68-85a1-b049587d43f5 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Neutron deleted interface dd67d1de-a902-443e-a6bb-1cc4903f5ccd; detaching it from the instance and deleting it from the info cache [ 833.198289] env[69982]: DEBUG nova.network.neutron [req-e1351762-6eb7-44dd-b79a-982a0f610db3 req-5f1e0bac-aabc-4d68-85a1-b049587d43f5 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.229881] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864587, 'name': CreateVM_Task, 'duration_secs': 0.318525} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.233030] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.233030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.233030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.233030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.233030] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57587294-0515-41b4-8406-68bd025824d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.239205] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 833.239205] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5284be11-fa04-492c-2cdb-58ee29f4e9ab" [ 833.239205] env[69982]: _type = "Task" [ 833.239205] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.250029] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5284be11-fa04-492c-2cdb-58ee29f4e9ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.334922] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864584, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.415854] env[69982]: DEBUG nova.compute.utils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 833.418858] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 833.420050] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 833.463916] env[69982]: DEBUG nova.policy [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea62e43062d24ecfb42e9d6b7e5bb8a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5102c0ae7ade4db1a99486f7632dbe3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 833.528142] env[69982]: DEBUG nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.529358] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ed230b-d594-4a9f-85c1-3684938e7fd1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.551091] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864588, 'name': ReconfigVM_Task, 'duration_secs': 0.386909} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.551091] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 833.551091] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.566517] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864589, 'name': Rename_Task, 'duration_secs': 0.160013} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.566811] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 833.567088] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce2e28e3-2b9e-487a-8653-4a9776df2b2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.579941] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 833.579941] env[69982]: value = "task-3864590" [ 833.579941] env[69982]: _type = "Task" [ 833.579941] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.593439] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864590, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.648165] env[69982]: DEBUG nova.network.neutron [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.704166] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e72b289c-a35c-47e7-aa24-2d029dc478ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.714253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070a40b2-8c23-4df3-a0b3-d6a3bc1d6e8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.760723] env[69982]: DEBUG nova.compute.manager [req-e1351762-6eb7-44dd-b79a-982a0f610db3 req-5f1e0bac-aabc-4d68-85a1-b049587d43f5 service nova] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Detach interface failed, port_id=dd67d1de-a902-443e-a6bb-1cc4903f5ccd, reason: Instance 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 833.765303] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5284be11-fa04-492c-2cdb-58ee29f4e9ab, 'name': SearchDatastore_Task, 'duration_secs': 0.011293} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.765485] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.765766] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.766058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.766251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.766492] env[69982]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.766977] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-24e4623d-7cf8-4b16-a1a8-413dc2021ef5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.777705] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.778071] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.779019] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34292a75-aecd-4c08-8abb-a1f7570e426a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.787720] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 833.787720] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e2bc92-6221-7a5b-dbb3-b9165685cf04" [ 833.787720] env[69982]: _type = "Task" [ 833.787720] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.799128] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e2bc92-6221-7a5b-dbb3-b9165685cf04, 'name': SearchDatastore_Task} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.799974] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17bf8d80-849c-4577-8455-96646d527775 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.808807] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 833.808807] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f740f-fe8f-3a84-77ef-4778644eda0a" [ 833.808807] env[69982]: _type = "Task" [ 833.808807] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.819427] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f740f-fe8f-3a84-77ef-4778644eda0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.820658] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Successfully created port: b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.835621] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864584, 'name': CreateVM_Task, 'duration_secs': 1.402822} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.835846] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.836325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.836550] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.836946] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.837255] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-077a234b-65c9-4c4c-b729-5a3fcb5ac8d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.842969] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 833.842969] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52212a7a-7796-7d40-6f6b-5a4d894b0c74" [ 833.842969] env[69982]: _type = "Task" [ 833.842969] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.853504] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52212a7a-7796-7d40-6f6b-5a4d894b0c74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.925437] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 834.051548] env[69982]: INFO nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] instance snapshotting [ 834.051770] env[69982]: WARNING nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 834.058115] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005b1579-5433-486e-96a1-344e32b9cd6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.061865] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc01e25-b5da-46ac-bf3e-838382ea34e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.103805] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d1c148-6150-467b-986b-9e8237b67ca9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.107136] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2f6f21-737b-41a9-b635-2e7a7c67c742 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.124327] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864590, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.141637] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 834.150794] env[69982]: INFO nova.compute.manager [-] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Took 1.63 seconds to deallocate network for instance. 
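The repeated "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines around "[datastore2] devstack-image-cache_base" come from oslo.concurrency's lockutils. A minimal sketch of that pattern, with the lock name taken from the log and the body purely illustrative (this is not Nova's actual image-cache code):

```python
# Minimal sketch of the oslo.concurrency locking pattern behind the
# "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines above.
# The lock name mirrors the log; the work inside the block is illustrative only.
from oslo_concurrency import lockutils

CACHE_LOCK = "[datastore2] devstack-image-cache_base"

def with_image_cache_lock():
    # lockutils.lock() is a context manager: entering it emits the
    # "Acquiring lock ..." / "Acquired lock ..." messages, leaving it emits
    # "Releasing lock ...", matching the entries in this log.
    with lockutils.lock(CACHE_LOCK):
        pass  # e.g. check or populate the cached image VMDK here
```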
[ 834.321238] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f740f-fe8f-3a84-77ef-4778644eda0a, 'name': SearchDatastore_Task, 'duration_secs': 0.011612} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.324184] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.324477] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.324963] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee0fd6a1-9c29-4048-aa22-5c59380863f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.333184] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 834.333184] env[69982]: value = "task-3864591" [ 834.333184] env[69982]: _type = "Task" [ 834.333184] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.344158] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864591, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.356162] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52212a7a-7796-7d40-6f6b-5a4d894b0c74, 'name': SearchDatastore_Task, 'duration_secs': 0.010548} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.359426] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.359754] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.360074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.360244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.360526] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.361884] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ca22ae1-c7d8-4d7a-a970-8584a105f36b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.371982] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.371982] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.375376] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b8eb018-9a16-4895-98a5-e2eb790a6eb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.383050] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 834.383050] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5247a1e4-f4c5-2232-0eea-143cc0bcefac" [ 834.383050] env[69982]: _type = "Task" [ 834.383050] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.395761] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5247a1e4-f4c5-2232-0eea-143cc0bcefac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.586495] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd79d80-16a5-4586-8136-b46908568202 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.597370] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3c760b-0da8-4b32-bfac-be6340c68f14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.611605] env[69982]: DEBUG oslo_vmware.api [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864590, 'name': PowerOnVM_Task, 'duration_secs': 0.944936} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.638044] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 834.638044] env[69982]: INFO nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Took 8.90 seconds to spawn the instance on the hypervisor. 
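The CopyVirtualDisk_Task entries above (task issued, "Waiting for the task: (returnval){...}", periodic "progress is N%" polls, then "completed successfully" with a duration) follow the oslo.vmware session/task interface whose module paths appear in the log. A rough sketch of that call-and-poll usage; the host, credentials, retry/poll values and datastore paths below are placeholders, not values from this deployment:

```python
# Rough sketch of the oslo.vmware call-and-poll pattern visible above:
# invoke a *_Task method, then wait_for_task() polls it and logs progress.
# Host, credentials, retry/poll settings and VMDK paths are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.test",   # vCenter host (placeholder)
    "user", "password",       # credentials (placeholders)
    10,                       # API retry count
    0.5)                      # task poll interval, seconds

disk_mgr = session.vim.service_content.virtualDiskManager
task = session.invoke_api(
    session.vim, "CopyVirtualDisk_Task", disk_mgr,
    sourceName="[datastore2] devstack-image-cache_base/IMAGE/IMAGE.vmdk",
    destName="[datastore2] INSTANCE/INSTANCE.vmdk")
# wait_for_task() drives the "Task: {...} progress is N%" lines and returns
# once the task reports success (or raises on error).
task_info = session.wait_for_task(task)
```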
[ 834.638321] env[69982]: DEBUG nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 834.639895] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 834.641540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9313aa73-68ef-49b5-a895-3caf10122a8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.645631] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-41a2fa91-40c2-47b0-923c-0dfc5d8980e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.649321] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b056d01-e28f-4d7f-bf01-1a7e735d96e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.658328] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.668848] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 834.668848] env[69982]: value = "task-3864592" [ 834.668848] env[69982]: _type = "Task" [ 834.668848] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.670450] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7343ff-d305-4945-9ae1-6a13c0f65c41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.693300] env[69982]: DEBUG nova.compute.provider_tree [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.698038] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864592, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.755741] env[69982]: DEBUG nova.network.neutron [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Port e0bdee0a-ac19-47d1-9d6b-baffaa7a181a binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 834.849418] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864591, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.898535] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5247a1e4-f4c5-2232-0eea-143cc0bcefac, 'name': SearchDatastore_Task, 'duration_secs': 0.01524} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.899496] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfcd181f-243f-471f-bedd-067ffdcd4c89 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.911935] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 834.911935] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524b8e48-4bd7-f9df-ce56-6f4e04ff66c9" [ 834.911935] env[69982]: _type = "Task" [ 834.911935] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.928681] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524b8e48-4bd7-f9df-ce56-6f4e04ff66c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.935388] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.964015] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.968693] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.970904] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.971317] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.971527] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.971777] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.972149] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.972434] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 834.972822] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.974383] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.974383] env[69982]: DEBUG nova.virt.hardware [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.974776] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b34c8a-4f93-4e36-a521-15ca9dd32371 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.991992] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af50dd59-9e9b-4567-ba1a-b202817a610a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.180222] env[69982]: INFO nova.compute.manager [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Took 45.06 seconds to build instance. [ 835.186422] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864592, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.199141] env[69982]: DEBUG nova.scheduler.client.report [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.349025] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864591, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690623} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.349025] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.349025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.349025] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c0b1efe-d546-4f0c-9da9-e7d397812124 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.356104] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 835.356104] env[69982]: value = "task-3864593" [ 835.356104] env[69982]: _type = "Task" [ 835.356104] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.369649] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.424210] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524b8e48-4bd7-f9df-ce56-6f4e04ff66c9, 'name': SearchDatastore_Task, 'duration_secs': 0.066095} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.424509] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.425419] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 835.425419] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f22a869d-493a-4de6-a3af-9b5e4864ef0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.432560] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 835.432560] env[69982]: value = "task-3864594" [ 835.432560] env[69982]: _type = "Task" [ 835.432560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.442235] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864594, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.508277] env[69982]: DEBUG nova.compute.manager [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Received event network-vif-plugged-b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 835.508277] env[69982]: DEBUG oslo_concurrency.lockutils [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] Acquiring lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.508277] env[69982]: DEBUG oslo_concurrency.lockutils [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] Lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.508277] env[69982]: DEBUG oslo_concurrency.lockutils [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] Lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.508277] env[69982]: DEBUG nova.compute.manager [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] No waiting events found dispatching network-vif-plugged-b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 835.509355] env[69982]: WARNING nova.compute.manager [req-8a3d828d-d16a-4728-be8d-a5c609913dcb req-a1784931-0429-45a5-b478-54a9ea68483f service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Received unexpected event network-vif-plugged-b5a29604-4742-41d4-b219-8bbd59089c94 for instance with vm_state building and task_state spawning. 
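The nova.virt.hardware lines earlier in this run walk through CPU-topology selection for the 1-vCPU m1.nano flavor: limits of 65536 sockets/cores/threads, exactly one possible topology, VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified enumeration (not Nova's actual implementation) that reproduces that single result:

```python
# Simplified illustration (not Nova's implementation) of the topology
# enumeration summarized above: list sockets*cores*threads factorizations of
# the vCPU count within the flavor/image maxima.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # A factor of vcpus can never exceed vcpus, so cap each range accordingly.
    for sockets in range(1, min(max_sockets, vcpus) + 1):
        for cores in range(1, min(max_cores, vcpus) + 1):
            for threads in range(1, min(max_threads, vcpus) + 1):
                if sockets * cores * threads == vcpus:
                    yield (sockets, cores, threads)

# m1.nano: 1 vCPU, maxima 65536:65536:65536 -> only (1, 1, 1), as logged.
print(list(possible_topologies(1, 65536, 65536, 65536)))
```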
[ 835.578306] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Successfully updated port: b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 835.682716] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3b77127-9c78-4ed4-8afb-327f547d8337 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.612s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.688434] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864592, 'name': CreateSnapshot_Task, 'duration_secs': 0.788446} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.688642] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 835.689559] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120970a4-9833-433c-986c-be5989421888 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.703264] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.797s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.703803] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 835.706507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.541s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.707181] env[69982]: DEBUG nova.objects.instance [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lazy-loading 'resources' on Instance uuid d73153ad-9258-4c3c-9699-b6364408d631 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 835.789442] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.789720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.789863] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.869053] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072086} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.869809] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.871028] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e1fdfe-ecbb-4cf7-a533-f118ed0dcb48 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.899368] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.899588] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45a1dc56-09bf-440a-8949-3a8a04029563 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.925666] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 835.925666] env[69982]: value = "task-3864595" [ 835.925666] env[69982]: _type = "Task" [ 835.925666] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.942268] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864595, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.949057] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864594, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.081691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.081864] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.082031] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 836.190079] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.214061] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 836.214933] env[69982]: DEBUG nova.compute.utils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 836.218322] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-910388b2-c0ca-4598-aeeb-021102d75255 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.223129] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 836.223382] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 836.231746] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 836.231746] env[69982]: value = "task-3864596" [ 836.231746] env[69982]: _type = "Task" [ 836.231746] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.246327] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.307075] env[69982]: DEBUG nova.policy [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea62e43062d24ecfb42e9d6b7e5bb8a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5102c0ae7ade4db1a99486f7632dbe3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 836.446214] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864595, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.457489] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864594, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.84076} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.460677] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 836.460961] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 836.463043] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b64f8768-682b-40d6-9e0d-3ebdbb9282c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.474250] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 836.474250] env[69982]: value = "task-3864597" [ 836.474250] env[69982]: _type = "Task" [ 836.474250] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.490453] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864597, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.641697] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 836.719936] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.724316] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 836.727658] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Successfully created port: 2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 836.749322] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.854989] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.855240] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.855454] env[69982]: DEBUG nova.network.neutron [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 836.875803] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10d4e76-8081-410e-970f-dfa4ff43d7cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.885155] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8391eaa7-55ee-4691-b86a-ff2140812088 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.918758] env[69982]: DEBUG nova.network.neutron [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updating instance_info_cache with network_info: [{"id": "b5a29604-4742-41d4-b219-8bbd59089c94", "address": "fa:16:3e:5d:73:4d", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a29604-47", "ovs_interfaceid": "b5a29604-4742-41d4-b219-8bbd59089c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.920498] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324dae85-d1f1-410d-9249-46277cec0dc4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.932387] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05a2785-2aaf-45ff-ad4d-25e4c41c9518 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.948408] env[69982]: DEBUG nova.compute.provider_tree [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.953480] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864595, 'name': ReconfigVM_Task, 'duration_secs': 0.633546} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.953587] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Reconfigured VM instance instance-0000002e to attach disk [datastore2] a70fa652-4726-4bc2-966f-530aaa79ba86/a70fa652-4726-4bc2-966f-530aaa79ba86.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.954686] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24e46ee2-e441-4109-bcac-6d35351dd3bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.962130] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 836.962130] env[69982]: value = "task-3864598" [ 836.962130] env[69982]: _type = "Task" [ 836.962130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.971242] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864598, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.982580] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864597, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088938} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.982875] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.983857] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52396f41-60c9-4e1f-b887-c92730e6ddc4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.005623] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 837.006293] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e040523-7d8e-486f-a281-15c43f9d119f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.026369] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 837.026369] env[69982]: value = "task-3864599" [ 837.026369] env[69982]: _type = "Task" [ 837.026369] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.035224] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864599, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.251346] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.423855] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.426176] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Instance network_info: |[{"id": "b5a29604-4742-41d4-b219-8bbd59089c94", "address": "fa:16:3e:5d:73:4d", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a29604-47", "ovs_interfaceid": "b5a29604-4742-41d4-b219-8bbd59089c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 837.426326] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:73:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39cd75b0-9ec7-48ed-b57f-34da0c573a60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5a29604-4742-41d4-b219-8bbd59089c94', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 837.434663] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating folder: Project (5102c0ae7ade4db1a99486f7632dbe3e). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.434931] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e20784a7-17f7-4388-826f-a80afe24cfa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.450227] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created folder: Project (5102c0ae7ade4db1a99486f7632dbe3e) in parent group-v767796. [ 837.450800] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating folder: Instances. Parent ref: group-v767945. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 837.450800] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b16df056-de30-4820-b37b-62aac98b030c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.454750] env[69982]: DEBUG nova.scheduler.client.report [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.465987] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created folder: Instances in parent group-v767945. [ 837.465987] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 837.469613] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 837.470172] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8106b598-71f6-469a-9229-74e812ab6de3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.494891] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864598, 'name': Rename_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.499901] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 837.499901] env[69982]: value = "task-3864602" [ 837.499901] env[69982]: _type = "Task" [ 837.499901] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.516214] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864602, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.539861] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864599, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.653215] env[69982]: DEBUG nova.network.neutron [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.720296] env[69982]: DEBUG nova.compute.manager [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Received event network-changed-b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.720296] env[69982]: DEBUG nova.compute.manager [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Refreshing instance network info cache due to event network-changed-b5a29604-4742-41d4-b219-8bbd59089c94. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 837.720296] env[69982]: DEBUG oslo_concurrency.lockutils [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] Acquiring lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.720835] env[69982]: DEBUG oslo_concurrency.lockutils [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] Acquired lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.722659] env[69982]: DEBUG nova.network.neutron [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Refreshing network info cache for port b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 837.747416] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 837.753031] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.787449] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 837.787449] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 837.787998] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 837.788460] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 837.788795] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 837.791040] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 837.791040] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 837.791040] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 837.791040] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 837.791040] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 837.791360] env[69982]: DEBUG nova.virt.hardware [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 837.791845] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342cb656-5820-452a-8656-237931e92888 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.802227] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c630dfd5-fc86-4f46-903f-b9faaaab47b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.927098] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 
tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.928274] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.970441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.973496] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.295s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.973496] env[69982]: DEBUG nova.objects.instance [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lazy-loading 'resources' on Instance uuid ad0c405f-48c8-4726-8e95-eb83a6e158fe {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 837.986554] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864598, 'name': Rename_Task, 'duration_secs': 0.726488} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.986806] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.987084] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8796353-d770-4bd6-852f-e2183df76bb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.996670] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 837.996670] env[69982]: value = "task-3864603" [ 837.996670] env[69982]: _type = "Task" [ 837.996670] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.005598] env[69982]: INFO nova.scheduler.client.report [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Deleted allocations for instance d73153ad-9258-4c3c-9699-b6364408d631 [ 838.012340] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864603, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.019027] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864602, 'name': CreateVM_Task, 'duration_secs': 0.454424} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.019027] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 838.019870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.020131] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.020546] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 838.023019] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9100cecc-b13c-41e0-ad82-94e38b91f0c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.028091] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 838.028091] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521a0ccd-a485-6f68-23a2-9d776d097190" [ 838.028091] env[69982]: _type = "Task" [ 838.028091] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.046826] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521a0ccd-a485-6f68-23a2-9d776d097190, 'name': SearchDatastore_Task, 'duration_secs': 0.015877} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.050885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.051212] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 838.051463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.051641] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.052961] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 838.052961] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864599, 'name': ReconfigVM_Task, 'duration_secs': 0.858645} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.052961] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4451883-6880-4fb6-b036-f2722f1bd075 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.055512] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Reconfigured VM instance instance-00000032 to attach disk [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 838.057175] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d64aaeed-5513-4dd4-9536-da7b53343fc7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.066048] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 838.066048] env[69982]: value = "task-3864604" [ 838.066048] env[69982]: _type = "Task" [ 838.066048] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.066343] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 838.066484] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 838.071352] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f7adb20-36fb-466d-9674-834e4e74c4df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.078776] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 838.078776] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5262d444-eef8-93c2-03bf-8bb5aad82239" [ 838.078776] env[69982]: _type = "Task" [ 838.078776] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.082744] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864604, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.093063] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5262d444-eef8-93c2-03bf-8bb5aad82239, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.156268] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.248493] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.433766] env[69982]: DEBUG nova.compute.utils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 838.450858] env[69982]: DEBUG nova.network.neutron [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updated VIF entry in instance network info cache for port b5a29604-4742-41d4-b219-8bbd59089c94. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 838.451158] env[69982]: DEBUG nova.network.neutron [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updating instance_info_cache with network_info: [{"id": "b5a29604-4742-41d4-b219-8bbd59089c94", "address": "fa:16:3e:5d:73:4d", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a29604-47", "ovs_interfaceid": "b5a29604-4742-41d4-b219-8bbd59089c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.508642] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864603, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.524377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0441983e-af90-44c2-99e0-9e1ea15dd26f tempest-ServersListShow296Test-945083984 tempest-ServersListShow296Test-945083984-project-member] Lock "d73153ad-9258-4c3c-9699-b6364408d631" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 44.703s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.576827] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864604, 'name': Rename_Task, 'duration_secs': 0.14926} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.577359] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.578232] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb3dcdc5-5655-4356-8e03-3279cf57632b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.591265] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 838.591265] env[69982]: value = "task-3864605" [ 838.591265] env[69982]: _type = "Task" [ 838.591265] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.599024] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5262d444-eef8-93c2-03bf-8bb5aad82239, 'name': SearchDatastore_Task, 'duration_secs': 0.021231} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.606799] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eee66080-0b80-4d94-bddf-271299440085 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.617658] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864605, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.620082] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 838.620082] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524079a1-8150-82ad-827b-11da1abba4fe" [ 838.620082] env[69982]: _type = "Task" [ 838.620082] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.638536] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524079a1-8150-82ad-827b-11da1abba4fe, 'name': SearchDatastore_Task, 'duration_secs': 0.017321} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.638656] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.639422] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/cd839916-6daf-4b31-941d-6305a585bfaa.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 838.639422] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb300cbe-0eab-4974-9be2-203387aed8a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.652020] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 838.652020] env[69982]: value = "task-3864606" [ 838.652020] env[69982]: _type = "Task" [ 838.652020] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.675845] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864606, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.691319] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b6aa3a-edec-457d-b66b-188b8aacf68f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.718044] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7057858f-350d-48ab-be6e-1e09cc4b6ff2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.727332] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 838.751065] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864596, 'name': CloneVM_Task, 'duration_secs': 2.048058} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.751241] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Created linked-clone VM from snapshot [ 838.752822] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb293960-f79b-4484-becd-a5599d84cda1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.769031] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Uploading image 6c11027b-62b0-4a6b-967c-97023308d763 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 838.804142] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 838.804142] env[69982]: value = "vm-767944" [ 838.804142] env[69982]: _type = "VirtualMachine" [ 838.804142] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 838.804847] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-710438a0-7969-430c-aa8d-0ccc708c0d55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.817105] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease: (returnval){ [ 838.817105] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525768b0-3a32-c999-966c-9e806749629b" [ 838.817105] env[69982]: _type = "HttpNfcLease" [ 838.817105] env[69982]: } obtained for exporting VM: (result){ [ 838.817105] env[69982]: value = "vm-767944" [ 838.817105] env[69982]: _type = "VirtualMachine" [ 838.817105] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 838.817619] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the lease: (returnval){ [ 838.817619] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525768b0-3a32-c999-966c-9e806749629b" [ 838.817619] env[69982]: _type = "HttpNfcLease" [ 838.817619] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 838.826468] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 838.826468] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525768b0-3a32-c999-966c-9e806749629b" [ 838.826468] env[69982]: _type = "HttpNfcLease" [ 838.826468] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 838.866533] env[69982]: DEBUG nova.compute.manager [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.866533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96e1b0b-93ce-48e5-8561-6deb3439e94d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.872802] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Successfully updated port: 2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 838.937619] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.954376] env[69982]: DEBUG oslo_concurrency.lockutils [req-fadd9cb6-30de-4c31-bb33-2f9a625f7013 req-196b2d5f-255a-4ed7-ba5b-f6dac7b4179d service nova] Releasing lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.008238] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864603, 'name': PowerOnVM_Task, 'duration_secs': 0.786533} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.011265] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.011520] env[69982]: DEBUG nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.012763] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69a387c-0c79-4417-b035-9b24e4ae2187 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.112447] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864605, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.131303] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf3144f-739d-4b92-9aa7-d2978d35814a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.141809] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f90f00-f6ea-4848-a090-f84671a51959 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.183185] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cc48bf-0af0-4c43-b074-6a8b94375883 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.192076] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864606, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.196019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf08105-02ce-40a9-af4f-a6ac10d81c9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.213429] env[69982]: DEBUG nova.compute.provider_tree [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 839.234265] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 839.234943] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5f2d2d6-72c8-4fc4-97d7-57c4a4d0c79f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.245168] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 839.245168] env[69982]: value = "task-3864608" [ 839.245168] env[69982]: _type = "Task" [ 839.245168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.259527] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864608, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.326701] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 839.326701] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525768b0-3a32-c999-966c-9e806749629b" [ 839.326701] env[69982]: _type = "HttpNfcLease" [ 839.326701] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 839.327041] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 839.327041] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525768b0-3a32-c999-966c-9e806749629b" [ 839.327041] env[69982]: _type = "HttpNfcLease" [ 839.327041] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 839.327914] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cdfba6-eb13-4acc-b0f6-e7977776c30a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.337374] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 839.337605] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 839.391860] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.391954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.392127] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 839.394308] env[69982]: INFO nova.compute.manager [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] instance snapshotting [ 839.397907] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61b5553-a94b-424a-9a93-b3c0d57d5b55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.418819] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ac478e-c9f7-4037-b36d-88f04c7b4e8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.477129] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3fc76f27-7c38-45fa-a783-ecbbf7336d50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.526900] env[69982]: INFO 
nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] bringing vm to original state: 'stopped' [ 839.609355] env[69982]: DEBUG oslo_vmware.api [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864605, 'name': PowerOnVM_Task, 'duration_secs': 0.588096} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.609673] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.609883] env[69982]: INFO nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Took 7.39 seconds to spawn the instance on the hypervisor. [ 839.610098] env[69982]: DEBUG nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.611042] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5698e767-3600-4d78-b00b-673fe6bc5056 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.693464] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864606, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652036} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.693464] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/cd839916-6daf-4b31-941d-6305a585bfaa.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.693464] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.693464] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8895c1d-9165-447a-b5bb-217c0f413195 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.704836] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 839.704836] env[69982]: value = "task-3864609" [ 839.704836] env[69982]: _type = "Task" [ 839.704836] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.719140] env[69982]: DEBUG nova.scheduler.client.report [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.731269] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864609, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.759252] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864608, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.932999] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 839.933738] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f1f7e650-c3de-45d9-ac70-7c7211b98e2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.942209] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 839.942209] env[69982]: value = "task-3864610" [ 839.942209] env[69982]: _type = "Task" [ 839.942209] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.959036] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864610, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.960255] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 839.975238] env[69982]: DEBUG nova.compute.manager [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Received event network-vif-plugged-2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.975708] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Acquiring lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.976949] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.976949] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.976949] env[69982]: DEBUG nova.compute.manager [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] No waiting events found dispatching network-vif-plugged-2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 839.976949] env[69982]: WARNING nova.compute.manager [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Received unexpected event network-vif-plugged-2c91625d-56d6-4706-930e-a55980f4feb1 for instance with vm_state building and task_state spawning. [ 839.976949] env[69982]: DEBUG nova.compute.manager [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Received event network-changed-2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 839.977209] env[69982]: DEBUG nova.compute.manager [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Refreshing instance network info cache due to event network-changed-2c91625d-56d6-4706-930e-a55980f4feb1. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 839.977352] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Acquiring lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.020948] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.021386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.021766] env[69982]: INFO nova.compute.manager [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Attaching volume bf0680bb-ab50-47a8-955a-fa309555586c to /dev/sdb [ 840.078717] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880b4b62-d992-49d5-a85f-2fa05a815935 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.088674] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5e01ae-2b2e-4e02-91b2-e2c40c16acf5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.106564] env[69982]: DEBUG nova.virt.block_device [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating existing volume attachment record: 297cae61-0bc1-458a-81b1-ce65b1fb9468 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 840.130693] env[69982]: INFO nova.compute.manager [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Took 45.04 seconds to build instance. 
[ 840.176918] env[69982]: DEBUG nova.network.neutron [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Updating instance_info_cache with network_info: [{"id": "2c91625d-56d6-4706-930e-a55980f4feb1", "address": "fa:16:3e:a4:b4:dd", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c91625d-56", "ovs_interfaceid": "2c91625d-56d6-4706-930e-a55980f4feb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.219149] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864609, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119332} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.219955] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 840.221283] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b3f448-6661-4787-ad37-b86f6e6f59f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.241068] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.253169] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/cd839916-6daf-4b31-941d-6305a585bfaa.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 840.254287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.765s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.254512] env[69982]: DEBUG nova.objects.instance [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lazy-loading 'resources' on Instance uuid 5743a020-0c09-45ec-aca4-5ce367cc201a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.255874] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dec4e414-78d0-4d10-9169-6aa2aef8e193 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.283917] env[69982]: INFO nova.scheduler.client.report [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Deleted allocations for instance ad0c405f-48c8-4726-8e95-eb83a6e158fe [ 840.290088] env[69982]: DEBUG oslo_vmware.api [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864608, 'name': PowerOnVM_Task, 'duration_secs': 0.698614} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.290618] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 840.290618] env[69982]: value = "task-3864612" [ 840.290618] env[69982]: _type = "Task" [ 840.290618] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.294300] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 840.294300] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3dd5c26-02ab-4f19-a590-81dcfee1ceea tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance '8b812422-4ca6-4d2b-b6af-873fdb21fab6' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 840.311844] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864612, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.459090] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864610, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.540635] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.540867] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.541070] env[69982]: DEBUG nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 840.545042] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7672b222-5777-4278-9ea7-98e1f95a0dbe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.554382] env[69982]: DEBUG nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 840.636040] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c029078-7384-47a1-a86a-cd916da97553 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.998s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.682249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.682783] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Instance network_info: |[{"id": "2c91625d-56d6-4706-930e-a55980f4feb1", "address": "fa:16:3e:a4:b4:dd", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c91625d-56", "ovs_interfaceid": "2c91625d-56d6-4706-930e-a55980f4feb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 840.683389] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Acquired lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.683599] env[69982]: DEBUG nova.network.neutron [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Refreshing network info cache for port 2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 840.686488] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:b4:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39cd75b0-9ec7-48ed-b57f-34da0c573a60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c91625d-56d6-4706-930e-a55980f4feb1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.693862] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.694995] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.695810] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-defef6d0-f613-472f-9660-f6bee11a1114 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.726177] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.726177] env[69982]: value = "task-3864615" [ 840.726177] env[69982]: _type = "Task" [ 840.726177] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.741716] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864615, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.810804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-717c53a7-e920-4f98-8227-56fd6a166a58 tempest-ServersV294TestFqdnHostnames-2099141024 tempest-ServersV294TestFqdnHostnames-2099141024-project-member] Lock "ad0c405f-48c8-4726-8e95-eb83a6e158fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.132s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.819358] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864612, 'name': ReconfigVM_Task, 'duration_secs': 0.483285} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.822728] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfigured VM instance instance-00000033 to attach disk [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/cd839916-6daf-4b31-941d-6305a585bfaa.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.824858] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5208d0ac-1303-48d6-aa2e-8a5c59b56db9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.834898] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 840.834898] env[69982]: value = "task-3864616" [ 840.834898] env[69982]: _type = "Task" [ 840.834898] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.847549] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864616, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.944540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c49196-f8b4-47bf-901d-451e2e77aca5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.963488] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66968aad-af4f-4989-af7a-674f225eb969 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.967698] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864610, 'name': CreateSnapshot_Task, 'duration_secs': 0.99428} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.968100] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 840.969323] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1567e4-0087-4fd0-9ac5-b86794e4a8b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.001273] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafe89d9-01be-400b-8810-c9fcac333d74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.021029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c5e40d-f3de-4ac7-9aa6-27fb785b6413 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.040370] env[69982]: DEBUG nova.compute.provider_tree [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.059872] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.060215] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92ea0d87-bbda-44bd-8a83-8eda69987b5b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.069950] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 841.069950] env[69982]: value = 
"task-3864617" [ 841.069950] env[69982]: _type = "Task" [ 841.069950] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.083900] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864617, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.138586] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.240665] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864615, 'name': CreateVM_Task, 'duration_secs': 0.445605} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.240847] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 841.241608] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.241785] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.242121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 841.242381] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d46b046-ce53-4e8b-9928-110bde91d56c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.248308] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 841.248308] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526cf1ed-8d6c-6b55-a761-23d04dc4996e" [ 841.248308] env[69982]: _type = "Task" [ 841.248308] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.260289] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526cf1ed-8d6c-6b55-a761-23d04dc4996e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.347699] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864616, 'name': Rename_Task, 'duration_secs': 0.189741} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.348028] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 841.349411] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0fd2fad-48d4-4d5f-98b9-d665b75d0d51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.358446] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 841.358446] env[69982]: value = "task-3864618" [ 841.358446] env[69982]: _type = "Task" [ 841.358446] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.369356] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864618, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.526493] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 841.529876] env[69982]: DEBUG nova.network.neutron [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Updated VIF entry in instance network info cache for port 2c91625d-56d6-4706-930e-a55980f4feb1. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 841.530297] env[69982]: DEBUG nova.network.neutron [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Updating instance_info_cache with network_info: [{"id": "2c91625d-56d6-4706-930e-a55980f4feb1", "address": "fa:16:3e:a4:b4:dd", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c91625d-56", "ovs_interfaceid": "2c91625d-56d6-4706-930e-a55980f4feb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.531894] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-183b857e-58e4-4f58-aba0-00b8ceae60dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.546033] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 841.546033] env[69982]: value = "task-3864619" [ 841.546033] env[69982]: _type = "Task" [ 841.546033] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.551168] env[69982]: DEBUG nova.scheduler.client.report [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 841.562298] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864619, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.581419] env[69982]: DEBUG oslo_vmware.api [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864617, 'name': PowerOffVM_Task, 'duration_secs': 0.251962} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.581555] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.581767] env[69982]: DEBUG nova.compute.manager [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.582903] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c297254-7046-473d-9641-74c828d73367 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.605834] env[69982]: INFO nova.compute.manager [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Rebuilding instance [ 841.673970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.677853] env[69982]: DEBUG nova.compute.manager [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.679927] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c1b26f-1ac3-4454-98eb-be456b41efe9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.762549] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526cf1ed-8d6c-6b55-a761-23d04dc4996e, 'name': SearchDatastore_Task, 'duration_secs': 0.017076} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.762942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 841.763241] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.763492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.763664] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.764326] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.764647] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38368df5-1b67-4bf2-9aa5-5bc80207d652 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.776308] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.776670] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.777734] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cc1ffd3-3df9-42da-ad51-6fec6181110b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.786256] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 841.786256] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0f128-df6b-e38b-dceb-2cbe29e866b6" [ 841.786256] env[69982]: _type = "Task" [ 841.786256] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.796588] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0f128-df6b-e38b-dceb-2cbe29e866b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.870822] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864618, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.037756] env[69982]: DEBUG oslo_concurrency.lockutils [req-76532cda-506e-42b7-9883-c74dc3397d83 req-4785c1dc-5163-4edc-b776-8ed8a6e1b7f1 service nova] Releasing lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.060932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.064355] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864619, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.065119] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.278s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.067330] env[69982]: INFO nova.compute.claims [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 842.090418] env[69982]: INFO nova.scheduler.client.report [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Deleted allocations for instance 5743a020-0c09-45ec-aca4-5ce367cc201a [ 842.099551] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.558s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.302116] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0f128-df6b-e38b-dceb-2cbe29e866b6, 'name': SearchDatastore_Task, 'duration_secs': 0.012076} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.303304] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c24005b-b15f-4c12-b09b-3989acfc85ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.312104] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 842.312104] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e14f34-c1cc-16a6-8dfa-3afdbb8f67fc" [ 842.312104] env[69982]: _type = "Task" [ 842.312104] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.325585] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e14f34-c1cc-16a6-8dfa-3afdbb8f67fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.373650] env[69982]: DEBUG oslo_vmware.api [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864618, 'name': PowerOnVM_Task, 'duration_secs': 0.727507} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.373979] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.374980] env[69982]: INFO nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Took 7.44 seconds to spawn the instance on the hypervisor. [ 842.374980] env[69982]: DEBUG nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 842.376101] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a6435d-de00-4bd9-ad75-29bf6ca08bf5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.564971] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864619, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.601975] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f43d0458-39d0-4d6c-bc6c-5c71d6075ef7 tempest-ServersTestFqdnHostnames-330353066 tempest-ServersTestFqdnHostnames-330353066-project-member] Lock "5743a020-0c09-45ec-aca4-5ce367cc201a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.956s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.609451] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.699735] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.699937] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30f8c0fc-f213-4d14-9764-1ba18140f513 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.711429] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 842.711429] env[69982]: value = "task-3864621" [ 842.711429] env[69982]: _type = "Task" [ 842.711429] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.722639] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.825407] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e14f34-c1cc-16a6-8dfa-3afdbb8f67fc, 'name': SearchDatastore_Task, 'duration_secs': 0.014425} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.825750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.826067] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/d43e5e7a-577d-4fe9-aff7-9012adfbdb9a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 842.826436] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b74bce88-1539-4382-a441-5ab22bd35519 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.835472] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 842.835472] env[69982]: value = "task-3864622" [ 842.835472] env[69982]: _type = "Task" [ 842.835472] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.846302] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.907227] env[69982]: INFO nova.compute.manager [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Took 47.41 seconds to build instance. [ 843.067937] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864619, 'name': CloneVM_Task, 'duration_secs': 1.478393} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.068849] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Created linked-clone VM from snapshot [ 843.069965] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1a04ff-8a20-43a6-9001-1855cf826473 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.088548] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Uploading image 06e26392-9dbf-4a3d-860a-019bf4936e02 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 843.106563] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 843.106563] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2d3389bb-5a69-4a55-9646-ded2ecd5ae6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.117030] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 843.117030] env[69982]: value = "task-3864623" [ 843.117030] env[69982]: _type = "Task" [ 843.117030] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.140552] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864623, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.224186] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864621, 'name': PowerOffVM_Task, 'duration_secs': 0.167931} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.230020] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 843.230020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.230415] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0d5a57-6c24-4859-a71b-93d253da1f6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.239521] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 843.240128] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c19520c-3079-4781-a1f8-eed374ed0e89 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.277155] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 843.278772] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 843.278772] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Deleting the datastore file [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 843.278772] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15a195c7-b5b7-4854-b77b-9da01b91121d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.286082] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 843.286082] env[69982]: value = "task-3864625" [ 843.286082] env[69982]: _type = "Task" [ 843.286082] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.296453] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.351162] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864622, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.408991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a464fb96-5f13-4043-9208-9b7243a5f700 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.787s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.632376] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864623, 'name': Destroy_Task, 'duration_secs': 0.510521} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.632840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.633097] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.633302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.633499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.633671] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.635286] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Destroyed the VM [ 843.636292] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 843.636615] env[69982]: INFO nova.compute.manager [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Terminating instance [ 843.638350] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5fe1be66-b172-4fa8-862e-e53b73be0bc6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.653468] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 843.653468] env[69982]: value = "task-3864626" [ 843.653468] env[69982]: _type = "Task" [ 843.653468] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.663875] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864626, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.751000] env[69982]: DEBUG nova.network.neutron [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Port e0bdee0a-ac19-47d1-9d6b-baffaa7a181a binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 843.751000] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.751000] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.751000] env[69982]: DEBUG nova.network.neutron [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 843.800477] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166087} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.800477] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.800615] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.800784] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.820423] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9557d52c-c1ca-4161-a8bc-be2c25d1767b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.829651] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe625a78-1f38-4ffa-8a32-8e505aa5739d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.870473] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b3605c-102f-499d-9acf-ccf9404ee8b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.876742] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543707} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.877555] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/d43e5e7a-577d-4fe9-aff7-9012adfbdb9a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 843.877818] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 843.878131] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6e33f059-de75-4164-8dd1-2e4553c6c13e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.884545] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c392e197-d3c7-43b3-9cfb-cc28a91d898f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.891275] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 843.891275] env[69982]: value = "task-3864627" [ 843.891275] env[69982]: _type = "Task" [ 843.891275] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.906203] env[69982]: DEBUG nova.compute.provider_tree [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.913757] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 843.916900] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.150209] env[69982]: DEBUG nova.compute.manager [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 844.150895] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 844.151320] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b68fc8-46d2-4ce2-9dd3-cae348f136ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.163633] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.167234] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c8ff3cf-b5f9-4e3f-8041-f9832a2e8662 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.168946] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.233264] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.233514] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.233821] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore2] a70fa652-4726-4bc2-966f-530aaa79ba86 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.234148] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed19dba2-674e-45a1-ba09-8207d72e22c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.246022] env[69982]: DEBUG oslo_vmware.api [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 844.246022] env[69982]: value = "task-3864629" [ 844.246022] env[69982]: _type = "Task" [ 844.246022] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.254391] env[69982]: DEBUG oslo_vmware.api [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864629, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.407010] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10256} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.410041] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.411058] env[69982]: DEBUG nova.scheduler.client.report [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.415415] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afdb350-0509-408e-bbab-c144435a028f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.448033] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/d43e5e7a-577d-4fe9-aff7-9012adfbdb9a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.451953] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cae19a9f-b9c6-4f68-a4b5-9392c5d7b3a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.483700] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 844.483700] env[69982]: value = "task-3864630" [ 844.483700] env[69982]: _type = "Task" [ 844.483700] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.493449] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.498447] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864630, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.667320] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864626, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.682470] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Volume attach. Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 844.682975] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767950', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'name': 'volume-bf0680bb-ab50-47a8-955a-fa309555586c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a8217447-bc22-4b84-925f-c3c09fb7228c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'serial': 'bf0680bb-ab50-47a8-955a-fa309555586c'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 844.683769] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95925c0e-9929-4c27-ba65-fa32b02233c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.712202] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc178b6-d0d8-42e2-852c-7e54b7c60500 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.744613] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] volume-bf0680bb-ab50-47a8-955a-fa309555586c/volume-bf0680bb-ab50-47a8-955a-fa309555586c.vmdk or device None with type 
thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.745059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17fa2952-88ca-49e4-a317-829f533aa417 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.772622] env[69982]: DEBUG oslo_vmware.api [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217397} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.774499] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 844.774951] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 844.774951] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 844.775128] env[69982]: INFO nova.compute.manager [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Took 0.62 seconds to destroy the instance on the hypervisor. [ 844.775356] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.776035] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Waiting for the task: (returnval){ [ 844.776035] env[69982]: value = "task-3864631" [ 844.776035] env[69982]: _type = "Task" [ 844.776035] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.776035] env[69982]: DEBUG nova.compute.manager [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 844.776035] env[69982]: DEBUG nova.network.neutron [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 844.791901] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864631, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.807347] env[69982]: DEBUG nova.network.neutron [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.853863] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 844.854137] env[69982]: DEBUG 
nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 844.854362] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 844.854510] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 844.854657] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 844.854807] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 844.855170] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 844.855376] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 844.855552] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 844.855743] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 844.855920] env[69982]: DEBUG nova.virt.hardware [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 844.857904] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144433b2-c49b-4bbb-a8cd-7f845749af0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.869734] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986e2d16-2eaf-4de9-904f-fef2ed0143e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.886181] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.892535] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.894910] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.896057] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-222ff2fc-d6ba-4d82-8a45-cc1da7a6b845 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.916643] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.916643] env[69982]: value = "task-3864632" [ 844.916643] env[69982]: _type = "Task" [ 844.916643] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.923303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.858s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.923858] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 844.926868] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 40.091s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.926868] env[69982]: DEBUG nova.objects.instance [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 844.941614] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864632, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.005934] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864630, 'name': ReconfigVM_Task, 'duration_secs': 0.307796} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.006324] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Reconfigured VM instance instance-00000034 to attach disk [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/d43e5e7a-577d-4fe9-aff7-9012adfbdb9a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.008123] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-555a98d0-a50d-43c0-b56f-cf37f4a59823 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.021723] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 845.021723] env[69982]: value = "task-3864633" [ 845.021723] env[69982]: _type = "Task" [ 845.021723] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.035369] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864633, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.167672] env[69982]: DEBUG oslo_vmware.api [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864626, 'name': RemoveSnapshot_Task, 'duration_secs': 1.187274} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.167672] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 845.299851] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864631, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.311412] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.427684] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864632, 'name': CreateVM_Task, 'duration_secs': 0.340869} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.427898] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.428372] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.428560] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.428905] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.429198] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70528108-05d7-4f56-8ae3-3c38f52ce8ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.439022] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 845.439022] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ba1e01-2688-30f8-38b2-7e40a1b6f071" [ 845.439022] env[69982]: 
_type = "Task" [ 845.439022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.440299] env[69982]: DEBUG nova.compute.utils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 845.443219] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 845.443427] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 845.463258] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ba1e01-2688-30f8-38b2-7e40a1b6f071, 'name': SearchDatastore_Task, 'duration_secs': 0.011458} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.463590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.463836] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 845.464091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.464572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.464572] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 845.465570] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bc20ff1-e35b-46b2-86b5-79b35420e794 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.470384] env[69982]: DEBUG nova.compute.manager [req-f417a2e9-bbf6-451e-95be-3c2d5213f82c req-89c012b5-94a4-40df-be88-983da3c9238d service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Received event network-vif-deleted-94487a32-cb22-46ea-afe4-0dab4d8d18be {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 845.470518] env[69982]: INFO nova.compute.manager [req-f417a2e9-bbf6-451e-95be-3c2d5213f82c req-89c012b5-94a4-40df-be88-983da3c9238d service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Neutron deleted interface 94487a32-cb22-46ea-afe4-0dab4d8d18be; detaching it from the instance and deleting it from the info cache [ 845.473800] env[69982]: DEBUG nova.network.neutron [req-f417a2e9-bbf6-451e-95be-3c2d5213f82c req-89c012b5-94a4-40df-be88-983da3c9238d service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.481234] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 845.483636] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 845.483636] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8775ef25-82aa-431e-bb6b-872b2217fad7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.490064] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 845.490064] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ea7537-0223-ecd3-d1cf-07154fead0a2" [ 845.490064] env[69982]: _type = "Task" [ 845.490064] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.501950] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ea7537-0223-ecd3-d1cf-07154fead0a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.503852] env[69982]: DEBUG nova.policy [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 845.534128] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864633, 'name': Rename_Task, 'duration_secs': 0.194613} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.534564] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.535085] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-837e29d8-a049-4619-bec9-bf6e8c9dea0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.543152] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 845.543152] env[69982]: value = "task-3864634" [ 845.543152] env[69982]: _type = "Task" [ 845.543152] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.553233] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864634, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.674098] env[69982]: WARNING nova.compute.manager [None req-f52f5d0c-1954-4957-b604-e6b6a9081b1e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Image not found during snapshot: nova.exception.ImageNotFound: Image 06e26392-9dbf-4a3d-860a-019bf4936e02 could not be found. 
[ 845.772071] env[69982]: DEBUG nova.network.neutron [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.799244] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864631, 'name': ReconfigVM_Task, 'duration_secs': 0.643356} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.803021] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfigured VM instance instance-00000009 to attach disk [datastore2] volume-bf0680bb-ab50-47a8-955a-fa309555586c/volume-bf0680bb-ab50-47a8-955a-fa309555586c.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.808641] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f627481-18ba-4fb1-8a2a-78ea02452518 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.824565] env[69982]: DEBUG nova.compute.manager [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69982) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 845.825287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.832079] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Waiting for the task: (returnval){ [ 845.832079] env[69982]: value = "task-3864635" [ 845.832079] env[69982]: _type = "Task" [ 845.832079] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.843027] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.943897] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 845.947673] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04f8ca66-7845-491d-a100-f7584794a58a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.948379] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.144s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.957053] env[69982]: INFO nova.compute.claims [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.980034] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f01f22ab-ee20-41cb-aac8-11bbd973a99e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.015702] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ea7537-0223-ecd3-d1cf-07154fead0a2, 'name': SearchDatastore_Task, 'duration_secs': 0.012273} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.021448] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa90f27-39bd-455b-ae50-7bac85fe2abd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.036678] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Successfully created port: 62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.040945] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1dfa1d6-4960-4de9-99ad-315e9a71e00e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.056142] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 846.056142] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e5cad0-406e-c8fc-94be-510bd0bc4072" [ 846.056142] env[69982]: _type = "Task" [ 846.056142] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.060589] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864634, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.084530] env[69982]: DEBUG nova.compute.manager [req-f417a2e9-bbf6-451e-95be-3c2d5213f82c req-89c012b5-94a4-40df-be88-983da3c9238d service nova] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Detach interface failed, port_id=94487a32-cb22-46ea-afe4-0dab4d8d18be, reason: Instance a70fa652-4726-4bc2-966f-530aaa79ba86 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 846.090919] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e5cad0-406e-c8fc-94be-510bd0bc4072, 'name': SearchDatastore_Task, 'duration_secs': 0.013399} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.091233] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.091494] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 846.091964] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c03bd7c9-05df-4f50-8ac2-94f45d75ba1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.100685] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 846.100685] env[69982]: value = "task-3864636" [ 846.100685] env[69982]: _type = "Task" [ 846.100685] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.112344] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864636, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.275439] env[69982]: INFO nova.compute.manager [-] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Took 1.50 seconds to deallocate network for instance. [ 846.349675] env[69982]: DEBUG oslo_vmware.api [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864635, 'name': ReconfigVM_Task, 'duration_secs': 0.196171} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.349996] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767950', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'name': 'volume-bf0680bb-ab50-47a8-955a-fa309555586c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a8217447-bc22-4b84-925f-c3c09fb7228c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'serial': 'bf0680bb-ab50-47a8-955a-fa309555586c'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 846.381992] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.382853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.383262] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.384925] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.384925] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.386630] env[69982]: INFO nova.compute.manager [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Terminating instance [ 846.559276] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864634, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.613247] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864636, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.783644] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.892611] env[69982]: DEBUG nova.compute.manager [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 846.892973] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.896049] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c51363c-a51f-4134-83ef-a0daac477cb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.908081] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.908257] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-448e1a13-bb09-4299-972c-d6f543cc25f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.919310] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 846.919310] env[69982]: value = "task-3864637" [ 846.919310] env[69982]: _type = "Task" [ 846.919310] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.935333] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.980879] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 847.015239] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 847.015478] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 847.015666] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 847.015907] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 847.016136] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 847.016248] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 847.016493] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 847.016692] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 847.016913] env[69982]: DEBUG nova.virt.hardware [None 
req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 847.017226] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 847.017516] env[69982]: DEBUG nova.virt.hardware [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 847.018459] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ea6b68-cbfe-402c-8ff5-8b83d3916c21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.031162] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca3d9fd-5610-4fc4-a049-55dd9c18f139 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.063307] env[69982]: DEBUG oslo_vmware.api [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864634, 'name': PowerOnVM_Task, 'duration_secs': 1.328693} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.063652] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.064115] env[69982]: INFO nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Took 9.32 seconds to spawn the instance on the hypervisor. [ 847.064115] env[69982]: DEBUG nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.064936] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82aa731-8943-40db-a30b-477062472eb1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.118478] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864636, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.652885} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.118782] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 847.119021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 847.119288] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47778962-357f-4ed8-8b02-4e69cabf4e14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.132099] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 847.132099] env[69982]: value = "task-3864638" [ 847.132099] env[69982]: _type = "Task" [ 847.132099] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.153041] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864638, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.402072] env[69982]: DEBUG nova.objects.instance [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lazy-loading 'flavor' on Instance uuid a8217447-bc22-4b84-925f-c3c09fb7228c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.432690] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864637, 'name': PowerOffVM_Task, 'duration_secs': 0.272356} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.433518] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.433944] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.436462] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-15f3b66c-0d40-4896-aae5-a6d8a34478e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.507626] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.507626] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.507803] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleting the datastore file [datastore2] a427dc30-7e0f-4313-a8e9-f76451e4a112 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.508126] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4be867f6-efc7-4a68-989e-e04a31142787 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.515854] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 847.515854] env[69982]: value = "task-3864640" [ 847.515854] env[69982]: _type = "Task" [ 847.515854] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.526289] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864640, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.585422] env[69982]: INFO nova.compute.manager [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Took 51.07 seconds to build instance. [ 847.633762] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 847.635019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ef5d6b-74dd-44b4-a15d-f0bed8fd65f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.642144] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf64748-0eb4-41ed-9e10-bfa77af5f386 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.649537] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 847.649787] env[69982]: ERROR oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk due to incomplete transfer. [ 847.654149] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-216c4e7f-92c1-4266-99e6-d29e2dad8590 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.656017] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120139} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.658240] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a0c6d7-6528-477a-9337-adc0d86a029f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.662116] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 847.666435] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7609fe81-9be6-4358-bf03-1f6a1119bacf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.674476] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5273d2e0-b841-bec4-60b2-0b671bbc08da/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 847.674860] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Uploaded image 6c11027b-62b0-4a6b-967c-97023308d763 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 847.680673] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 847.711238] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c2e99971-91ff-4447-b5c0-92e174afc7a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.722516] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-439ac8b3-3566-4a53-be81-2e4a6e701f2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.735083] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 847.735996] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e729458f-20ba-4e79-8ec8-27b0ce560074 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.755045] env[69982]: DEBUG oslo_vmware.api [None 
req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 847.755045] env[69982]: value = "task-3864641" [ 847.755045] env[69982]: _type = "Task" [ 847.755045] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.763563] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbbca280-5857-41f4-b3ea-6b1514ec26ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.768397] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 847.768397] env[69982]: value = "task-3864642" [ 847.768397] env[69982]: _type = "Task" [ 847.768397] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.771915] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864641, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.784993] env[69982]: DEBUG nova.compute.provider_tree [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.790731] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864642, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.908925] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a506d7b3-c527-4097-8493-47b79e0d3962 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.888s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.985184] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Successfully updated port: 62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.027470] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864640, 'name': DeleteDatastoreFile_Task} progress is 0%. 
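The "Waiting for the task: (returnval){ ... } to complete" blocks, the periodic "progress is N%" lines and the final "completed successfully" records all come from one polling loop: the vCenter call returns a task moref immediately, which is then polled until it reaches a terminal state. A minimal sketch of that pattern, assuming a reachable vCenter, placeholder credentials and a placeholder moref, and the oslo.vmware session API used by this driver:

# Sketch only: placeholder host, credentials and moref.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task object (the per-interval "progress is N%"
# DEBUG lines) and returns its info once it succeeds, or raises on failure.
task_info = session.wait_for_task(task)
print(task_info.state)   # 'success' corresponds to "completed successfully"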
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.088232] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3884a4fa-ef02-4c8f-ab2d-e27f46240432 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.892s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.196665] env[69982]: DEBUG nova.compute.manager [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received event network-vif-plugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.196885] env[69982]: DEBUG oslo_concurrency.lockutils [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.197116] env[69982]: DEBUG oslo_concurrency.lockutils [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.197594] env[69982]: DEBUG oslo_concurrency.lockutils [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.197786] env[69982]: DEBUG nova.compute.manager [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] No waiting events found dispatching network-vif-plugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.197976] env[69982]: WARNING nova.compute.manager [req-60de4fe4-02b8-4bff-9756-c4e9b718bcd0 req-fb2ed337-1fa1-45a6-ae82-1501228a455e service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received unexpected event network-vif-plugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 for instance with vm_state building and task_state spawning. [ 848.269459] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864641, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.283987] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864642, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.292437] env[69982]: DEBUG nova.scheduler.client.report [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.489223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.489223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.489223] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 848.534031] env[69982]: DEBUG oslo_vmware.api [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864640, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.665877} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.534341] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 848.538025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 848.538025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 848.538025] env[69982]: INFO nova.compute.manager [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Took 1.64 seconds to destroy the instance on the hypervisor. [ 848.538025] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.538025] env[69982]: DEBUG nova.compute.manager [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 848.538940] env[69982]: DEBUG nova.network.neutron [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 848.592267] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.766689] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864641, 'name': Destroy_Task, 'duration_secs': 0.847121} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.766689] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Destroyed the VM [ 848.766970] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 848.767174] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-beac0051-9525-4b31-9ea4-7c9e6c293b3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.779808] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 848.779808] env[69982]: value = "task-3864643" [ 848.779808] env[69982]: _type = "Task" [ 848.779808] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.788133] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864642, 'name': ReconfigVM_Task, 'duration_secs': 0.986842} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.789048] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Reconfigured VM instance instance-00000032 to attach disk [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee/c34355fa-3712-4338-942d-acdb2f8a91ee.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 848.789742] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16af7ecf-2bb5-4820-8bf8-fc3f482757d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.794863] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864643, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.797900] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.798159] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.800154] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.852s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.800697] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 848.804870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.089s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.805387] env[69982]: DEBUG nova.objects.instance [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lazy-loading 'resources' on Instance uuid 6de35617-22cf-4a32-8651-28ea67532b8f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 848.806758] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 848.806758] env[69982]: value = "task-3864644" [ 848.806758] env[69982]: _type = "Task" [ 848.806758] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.819389] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864644, 'name': Rename_Task} progress is 10%. 
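The Acquiring lock / acquired / "released" triplets throughout this section are oslo.concurrency lock tracing: each records which callable asked for the named lock, how long it waited, and how long it held it. A minimal sketch of the primitive behind them, with lock names taken from the log as placeholders:

# Sketch of the lock pattern behind the Acquiring/acquired/"released" records.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # runs only while the in-process "compute_resources" lock is held
    ...

# the same primitive as a context manager, e.g. around a volume detach
with lockutils.lock('a8217447-bc22-4b84-925f-c3c09fb7228c'):
    ...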
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.018307] env[69982]: INFO nova.compute.manager [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Rescuing [ 849.018307] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.018307] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.018307] env[69982]: DEBUG nova.network.neutron [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 849.027049] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 849.129660] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.206650] env[69982]: DEBUG nova.network.neutron [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updating instance_info_cache with network_info: [{"id": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "address": "fa:16:3e:74:2d:e4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ecce80-43", "ovs_interfaceid": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.291843] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864643, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.304372] env[69982]: INFO nova.compute.manager [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Detaching volume bf0680bb-ab50-47a8-955a-fa309555586c [ 849.310988] env[69982]: DEBUG nova.compute.utils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.317019] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Allocating IP information in the background. 
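The instance_info_cache update above carries the full network_info structure for the port: VIF id and MAC, the subnet with its fixed IP, and the OVS/NSX binding details. A small sketch of walking that structure (assumed already parsed into plain Python lists and dicts) to pull out the fixed addresses:

def fixed_ips(network_info):
    """Return (vif_id, address) pairs for every fixed IP in a network_info list."""
    ips = []
    for vif in network_info:
        for subnet in vif['network']['subnets']:
            for ip in subnet['ips']:
                if ip['type'] == 'fixed':
                    ips.append((vif['id'], ip['address']))
    return ips

# For the VIF in the cache update above this yields:
# [('62ecce80-4354-4f66-a470-0b4ef8d663a8', '192.168.128.10')]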
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.317019] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.326914] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864644, 'name': Rename_Task, 'duration_secs': 0.321947} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.327861] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.328152] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85c99c23-452d-4df0-b8a5-1bc69b6e0fae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.336314] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 849.336314] env[69982]: value = "task-3864645" [ 849.336314] env[69982]: _type = "Task" [ 849.336314] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.352061] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864645, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.366286] env[69982]: INFO nova.virt.block_device [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Attempting to driver detach volume bf0680bb-ab50-47a8-955a-fa309555586c from mountpoint /dev/sdb [ 849.366554] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 849.366777] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767950', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'name': 'volume-bf0680bb-ab50-47a8-955a-fa309555586c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a8217447-bc22-4b84-925f-c3c09fb7228c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'serial': 'bf0680bb-ab50-47a8-955a-fa309555586c'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 849.367678] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-867efded-a974-41fa-9bc0-10c689b9de93 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.372153] env[69982]: DEBUG nova.policy [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'aca181a68a544e569dc434510bf81816', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '637314b21fea4fa0ab2b0cdf8bd53239', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.400636] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0895b8e5-dc1e-4e29-a995-56b585bef0b8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.410120] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ef7248-364a-43e4-b3fd-1190bd2c7da2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.440635] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931b4cb5-69dd-46e1-9b04-f16f4c74294f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.458793] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] The volume has not been displaced from its original location: [datastore2] volume-bf0680bb-ab50-47a8-955a-fa309555586c/volume-bf0680bb-ab50-47a8-955a-fa309555586c.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 849.464747] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfiguring VM instance instance-00000009 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 849.468258] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0063f144-67c9-4325-819b-34f97f118c75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.481860] env[69982]: DEBUG nova.network.neutron [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.489109] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Waiting for the task: (returnval){ [ 849.489109] env[69982]: value = "task-3864646" [ 849.489109] env[69982]: _type = "Task" [ 849.489109] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.501906] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.699165] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "c563267f-7699-4bd1-83cf-59ecef500ac3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.699414] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.699735] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.700039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.700260] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.703077] env[69982]: INFO nova.compute.manager [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Terminating instance [ 849.710021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.712153] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance network_info: |[{"id": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "address": "fa:16:3e:74:2d:e4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ecce80-43", "ovs_interfaceid": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 849.712277] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:2d:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62ecce80-4354-4f66-a470-0b4ef8d663a8', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.720712] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 
tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.724559] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 849.725352] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4033eca1-9a68-4daa-b447-445fd33401a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.755971] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.755971] env[69982]: value = "task-3864647" [ 849.755971] env[69982]: _type = "Task" [ 849.755971] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.774528] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864647, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.794218] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864643, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.826599] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Start building block device mappings for instance. 
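The oslo.service loopingcall records ("Waiting for function ... to return") show how a blocking vCenter call is turned into something an eventlet worker can wait on without stalling the service. A sketch of the underlying primitive using the fixed-interval variant; this illustrates the loopingcall API generally, not necessarily the exact helper wrapped around create_vm here:

from oslo_service import loopingcall

def _poll():
    # placeholder body; a real poller checks VM/task state here and raises
    # LoopingCallDone(result) once the work it is waiting on has finished
    raise loopingcall.LoopingCallDone(retvalue='done')

timer = loopingcall.FixedIntervalLoopingCall(_poll)
result = timer.start(interval=0.5).wait()   # blocks until LoopingCallDone
print(result)                               # 'done'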
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 849.843918] env[69982]: DEBUG nova.network.neutron [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Updating instance_info_cache with network_info: [{"id": "2c91625d-56d6-4706-930e-a55980f4feb1", "address": "fa:16:3e:a4:b4:dd", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c91625d-56", "ovs_interfaceid": "2c91625d-56d6-4706-930e-a55980f4feb1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.850545] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Successfully created port: f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.859640] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864645, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.984981] env[69982]: INFO nova.compute.manager [-] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Took 1.45 seconds to deallocate network for instance. [ 850.011137] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864646, 'name': ReconfigVM_Task, 'duration_secs': 0.314306} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.012191] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Reconfigured VM instance instance-00000009 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 850.018932] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5dd6bee-e335-43d4-bd5c-9e2c8c35b79e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.047858] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Waiting for the task: (returnval){ [ 850.047858] env[69982]: value = "task-3864648" [ 850.047858] env[69982]: _type = "Task" [ 850.047858] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.057347] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6376d35-bb94-4515-ac8f-8a1813bfc227 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.065174] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864648, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.073411] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf13baed-a764-4d66-80af-db22a0227d48 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.109117] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5384e73c-6d81-41bf-bcae-82c5cfa07285 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.117992] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d4f381-0a57-46b5-bfc1-4e880c0502aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.138874] env[69982]: DEBUG nova.compute.provider_tree [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.208410] env[69982]: DEBUG nova.compute.manager [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 850.208763] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 850.209898] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43c448d-cbe5-4c8d-888b-d2018d639f56 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.219910] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 850.220243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e11df2a-11b7-4a61-a60f-d19abdf216e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.268037] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864647, 'name': CreateVM_Task, 'duration_secs': 0.390819} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.268037] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 850.268592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.268687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.269602] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 850.269602] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68da5b82-3eb5-4b68-bf99-9b3ae8a048b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.275405] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 850.275405] env[69982]: value = 
"session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3018b-4cfa-3597-59f0-bfb367171ede" [ 850.275405] env[69982]: _type = "Task" [ 850.275405] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.288763] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3018b-4cfa-3597-59f0-bfb367171ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.293962] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 850.294185] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 850.294372] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore1] c563267f-7699-4bd1-83cf-59ecef500ac3 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 850.295176] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5230cf4c-74cc-4177-a44a-5fca10b09450 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.300925] env[69982]: DEBUG oslo_vmware.api [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864643, 'name': RemoveSnapshot_Task, 'duration_secs': 1.302913} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.301728] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 850.302018] env[69982]: INFO nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Took 16.25 seconds to snapshot the instance on the hypervisor. [ 850.306511] env[69982]: DEBUG oslo_vmware.api [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 850.306511] env[69982]: value = "task-3864650" [ 850.306511] env[69982]: _type = "Task" [ 850.306511] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.315822] env[69982]: DEBUG oslo_vmware.api [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864650, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.347394] env[69982]: DEBUG nova.compute.manager [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received event network-changed-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.347394] env[69982]: DEBUG nova.compute.manager [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Refreshing instance network info cache due to event network-changed-62ecce80-4354-4f66-a470-0b4ef8d663a8. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 850.347557] env[69982]: DEBUG oslo_concurrency.lockutils [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] Acquiring lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.347750] env[69982]: DEBUG oslo_concurrency.lockutils [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] Acquired lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.347940] env[69982]: DEBUG nova.network.neutron [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Refreshing network info cache for port 62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 850.351178] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.358137] env[69982]: DEBUG oslo_vmware.api [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864645, 'name': PowerOnVM_Task, 'duration_secs': 0.59104} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.358137] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.358137] env[69982]: DEBUG nova.compute.manager [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.359157] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49308f96-6a9f-4ccc-a0cc-45cb61da339a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.499000] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.558964] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864648, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.588029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.588029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.643981] env[69982]: DEBUG nova.scheduler.client.report [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.789337] env[69982]: 
DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3018b-4cfa-3597-59f0-bfb367171ede, 'name': SearchDatastore_Task, 'duration_secs': 0.020032} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.789813] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.790194] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.790773] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.790853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.791125] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.791516] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84adfb43-596b-442a-b5b2-108afc9a89fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.801951] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.802264] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.803211] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b332d36-b67c-44a7-b0dc-ce8f13201fc9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.806574] env[69982]: DEBUG nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance disappeared during snapshot {{(pid=69982) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 850.814250] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 850.814250] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a92ab9-3bdc-37d5-fc82-cd28198bc290" [ 850.814250] env[69982]: _type = "Task" [ 850.814250] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.821216] env[69982]: DEBUG oslo_vmware.api [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864650, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.444508} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.822199] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.822199] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 850.822199] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 850.822351] env[69982]: INFO nova.compute.manager [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Took 0.61 seconds to destroy the instance on the hypervisor. [ 850.823202] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 850.823532] env[69982]: DEBUG nova.compute.manager [None req-fb2b647c-e9d3-458f-a90a-a07d43e5864b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image not found during clean up 6c11027b-62b0-4a6b-967c-97023308d763 {{(pid=69982) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 850.824606] env[69982]: DEBUG nova.compute.manager [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 850.824717] env[69982]: DEBUG nova.network.neutron [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 850.832989] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a92ab9-3bdc-37d5-fc82-cd28198bc290, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.837905] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 850.867409] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 850.867700] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.868072] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 850.868204] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 
tempest-ServerPasswordTestJSON-1651974246-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.868497] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 850.868786] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 850.869169] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 850.869419] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 850.869682] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 850.869901] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 850.870154] env[69982]: DEBUG nova.virt.hardware [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 850.874949] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a301a81a-b2d8-46a7-9ac9-4e3fe1521055 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.883505] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.890852] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb56467-e70e-4642-8152-ee04efd448a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 851.060338] env[69982]: DEBUG oslo_vmware.api [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Task: {'id': task-3864648, 'name': ReconfigVM_Task, 'duration_secs': 0.812347} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.063238] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767950', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'name': 'volume-bf0680bb-ab50-47a8-955a-fa309555586c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a8217447-bc22-4b84-925f-c3c09fb7228c', 'attached_at': '', 'detached_at': '', 'volume_id': 'bf0680bb-ab50-47a8-955a-fa309555586c', 'serial': 'bf0680bb-ab50-47a8-955a-fa309555586c'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 851.100442] env[69982]: DEBUG nova.network.neutron [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updated VIF entry in instance network info cache for port 62ecce80-4354-4f66-a470-0b4ef8d663a8. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 851.100924] env[69982]: DEBUG nova.network.neutron [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updating instance_info_cache with network_info: [{"id": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "address": "fa:16:3e:74:2d:e4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ecce80-43", "ovs_interfaceid": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.149727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.345s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.154891] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.523s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.154891] env[69982]: DEBUG nova.objects.instance [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'resources' on Instance uuid d5c23433-a0f3-4f0a-9c62-051d07dcd712 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.192831] env[69982]: INFO nova.scheduler.client.report [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Deleted allocations for instance 6de35617-22cf-4a32-8651-28ea67532b8f [ 851.241433] env[69982]: DEBUG nova.compute.manager [req-73b760a2-d029-4302-9508-c71210bf3be1 req-a8dc499d-01e0-4e42-97da-976cd7708196 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Received event network-vif-deleted-82903e23-208f-46d2-81eb-689af6de8c42 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.241657] env[69982]: INFO nova.compute.manager [req-73b760a2-d029-4302-9508-c71210bf3be1 req-a8dc499d-01e0-4e42-97da-976cd7708196 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Neutron deleted interface 82903e23-208f-46d2-81eb-689af6de8c42; detaching it from the instance and deleting it from the info cache [ 851.241863] env[69982]: DEBUG nova.network.neutron [req-73b760a2-d029-4302-9508-c71210bf3be1 req-a8dc499d-01e0-4e42-97da-976cd7708196 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.327645] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a92ab9-3bdc-37d5-fc82-cd28198bc290, 'name': SearchDatastore_Task, 'duration_secs': 0.01621} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.328625] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee968e4b-d96f-4ab4-966d-cb6a47d654bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.336675] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 851.336675] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217bfbd-1efa-1042-7fc0-2b7310895645" [ 851.336675] env[69982]: _type = "Task" [ 851.336675] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.350018] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217bfbd-1efa-1042-7fc0-2b7310895645, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.606130] env[69982]: DEBUG oslo_concurrency.lockutils [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] Releasing lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.606130] env[69982]: DEBUG nova.compute.manager [req-d0151887-4d40-45b3-afb5-e2354b5a5517 req-c1818a72-1d0d-405e-9af1-65c6280db21d service nova] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Received event network-vif-deleted-c905d5aa-25d9-454f-8fbf-4f3622b4d9fc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.622818] env[69982]: DEBUG nova.objects.instance [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lazy-loading 'flavor' on Instance uuid a8217447-bc22-4b84-925f-c3c09fb7228c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.691773] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Successfully updated port: f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.708787] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3dd8d184-a3ef-4112-b119-9fa588056ef5 tempest-ListServerFiltersTestJSON-1459211445 tempest-ListServerFiltersTestJSON-1459211445-project-member] Lock "6de35617-22cf-4a32-8651-28ea67532b8f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.588s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.713782] env[69982]: DEBUG nova.network.neutron [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.747677] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0497956f-06b5-43d9-97c7-bb68f996826d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.761702] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b67e5d-ce8e-474f-9f12-2c99b08cee66 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.807217] env[69982]: DEBUG nova.compute.manager [req-73b760a2-d029-4302-9508-c71210bf3be1 req-a8dc499d-01e0-4e42-97da-976cd7708196 service nova] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Detach interface failed, port_id=82903e23-208f-46d2-81eb-689af6de8c42, reason: Instance c563267f-7699-4bd1-83cf-59ecef500ac3 
could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 851.815327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "c34355fa-3712-4338-942d-acdb2f8a91ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.815788] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.815885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "c34355fa-3712-4338-942d-acdb2f8a91ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.815990] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.816173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.818436] env[69982]: INFO nova.compute.manager [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Terminating instance [ 851.851479] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217bfbd-1efa-1042-7fc0-2b7310895645, 'name': SearchDatastore_Task, 'duration_secs': 0.041593} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.854699] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.854814] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 881cbfae-7630-45e0-a8ad-b2cd283689ea/881cbfae-7630-45e0-a8ad-b2cd283689ea.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 851.855311] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fff67218-013b-4011-89bb-2dd9d20017b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.864566] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 851.864566] env[69982]: value = "task-3864651" [ 851.864566] env[69982]: _type = "Task" [ 851.864566] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.876299] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864651, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.894573] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 851.894899] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-efd90025-bcb3-4c48-b921-7f2c902b8422 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.904237] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 851.904237] env[69982]: value = "task-3864652" [ 851.904237] env[69982]: _type = "Task" [ 851.904237] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.913392] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864652, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.197321] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.197453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquired lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.197727] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 852.219645] env[69982]: INFO nova.compute.manager [-] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Took 1.39 seconds to deallocate network for instance. [ 852.321855] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437da7ba-37e6-446d-9ab0-05a2c2fe0e11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.325491] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "refresh_cache-c34355fa-3712-4338-942d-acdb2f8a91ee" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.325744] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquired lock "refresh_cache-c34355fa-3712-4338-942d-acdb2f8a91ee" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.325962] env[69982]: DEBUG nova.network.neutron [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 852.334284] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1613d6-bb62-4d2d-901f-0f66fed1e381 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.377646] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee29a84c-c0d8-4982-816c-6ba5182d99dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.391370] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72beaa46-8d70-4b2b-9d41-e08e77747f5d {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.395850] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864651, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.411255] env[69982]: DEBUG nova.compute.provider_tree [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.425860] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864652, 'name': PowerOffVM_Task, 'duration_secs': 0.207551} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.426182] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 852.427043] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664babce-0f52-4fe6-b965-c23ecd8d4199 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.435664] env[69982]: DEBUG nova.compute.manager [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Received event network-vif-plugged-f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.435959] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Acquiring lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.436235] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.436503] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.436650] env[69982]: DEBUG nova.compute.manager [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 
req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] No waiting events found dispatching network-vif-plugged-f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.436794] env[69982]: WARNING nova.compute.manager [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Received unexpected event network-vif-plugged-f2644b3d-9f1e-40f2-8c76-55c4ed912488 for instance with vm_state building and task_state spawning. [ 852.437017] env[69982]: DEBUG nova.compute.manager [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Received event network-changed-f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.437217] env[69982]: DEBUG nova.compute.manager [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Refreshing instance network info cache due to event network-changed-f2644b3d-9f1e-40f2-8c76-55c4ed912488. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 852.437419] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Acquiring lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.458427] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc46fb5c-9a69-4ba8-bc89-f2375af5b9c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.510562] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.510947] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-711cb526-fb2b-4d7c-9cdc-c906fda0166a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.519675] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 852.519675] env[69982]: value = "task-3864653" [ 852.519675] env[69982]: _type = "Task" [ 852.519675] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.533413] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 852.533413] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 852.533413] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.533588] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.533680] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 852.533950] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-333c2c46-2ab8-450a-b153-790c26732d6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.544921] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 852.545167] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 852.545955] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b67c8a29-c2bf-40ae-b202-496ff2bcbe50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.552854] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 852.552854] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528dafbe-250d-2fcf-af7f-411b926205b8" [ 852.552854] env[69982]: _type = "Task" [ 852.552854] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.561843] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528dafbe-250d-2fcf-af7f-411b926205b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.636023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb006886-8699-48c3-a51b-8a41af5ca124 tempest-VolumesAssistedSnapshotsTest-715863651 tempest-VolumesAssistedSnapshotsTest-715863651-project-admin] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.837s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.731036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.748722] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 852.856974] env[69982]: DEBUG nova.network.neutron [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 852.883741] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597729} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.886560] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 881cbfae-7630-45e0-a8ad-b2cd283689ea/881cbfae-7630-45e0-a8ad-b2cd283689ea.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 852.887438] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 852.887438] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8821d414-d6cb-45f5-bba4-17431ea8fbb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.895675] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 852.895675] env[69982]: value = "task-3864654" [ 852.895675] env[69982]: _type = "Task" [ 852.895675] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.907987] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864654, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.917666] env[69982]: DEBUG nova.scheduler.client.report [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.945468] env[69982]: DEBUG nova.network.neutron [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.960460] env[69982]: DEBUG nova.network.neutron [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Updating instance_info_cache with network_info: [{"id": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "address": "fa:16:3e:50:4c:7c", "network": {"id": "aa8b3408-496f-48d9-a2f9-18cbef4644c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1984690010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637314b21fea4fa0ab2b0cdf8bd53239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2644b3d-9f", "ovs_interfaceid": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.069580] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528dafbe-250d-2fcf-af7f-411b926205b8, 'name': SearchDatastore_Task, 'duration_secs': 0.011613} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.069580] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89a752c8-0283-421b-8ece-7c6b1a16352d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.075795] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 853.075795] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f83303-def2-e0d6-c785-ed2c8dda504e" [ 853.075795] env[69982]: _type = "Task" [ 853.075795] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.087164] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f83303-def2-e0d6-c785-ed2c8dda504e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.409651] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079407} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.410215] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 853.411287] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba63138-fc7e-45c8-bfa2-f6a9aeae0be7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.427125] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.438145] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 881cbfae-7630-45e0-a8ad-b2cd283689ea/881cbfae-7630-45e0-a8ad-b2cd283689ea.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 853.439106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 
tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.994s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.439484] env[69982]: DEBUG nova.objects.instance [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lazy-loading 'resources' on Instance uuid 589419ea-c609-45bb-bde5-3b22d9ff111e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.440957] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7705ab9a-f7b1-401f-bfef-5059557ad87c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.456559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Releasing lock "refresh_cache-c34355fa-3712-4338-942d-acdb2f8a91ee" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.457024] env[69982]: DEBUG nova.compute.manager [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 853.457166] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 853.458380] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04221047-28b5-4dcf-b2ca-2d0ca3a17c15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.463029] env[69982]: INFO nova.scheduler.client.report [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocations for instance d5c23433-a0f3-4f0a-9c62-051d07dcd712 [ 853.467909] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Releasing lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.467909] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Instance network_info: |[{"id": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "address": "fa:16:3e:50:4c:7c", "network": {"id": "aa8b3408-496f-48d9-a2f9-18cbef4644c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1984690010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637314b21fea4fa0ab2b0cdf8bd53239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2644b3d-9f", "ovs_interfaceid": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 853.469495] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Acquired lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.469716] env[69982]: DEBUG nova.network.neutron [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Refreshing network info cache for port f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 853.470980] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:4c:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7d2575f-b92f-44ec-a863-634cb76631a2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2644b3d-9f1e-40f2-8c76-55c4ed912488', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.480142] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Creating folder: Project (637314b21fea4fa0ab2b0cdf8bd53239). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.481485] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 853.481485] env[69982]: value = "task-3864656" [ 853.481485] env[69982]: _type = "Task" [ 853.481485] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.486489] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3bcb8c6-e319-4b99-9150-d41f9ec1b8b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.488829] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 853.493588] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42ec3daf-e88b-45e4-bc76-516851e1bc54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.507444] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864656, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.511048] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Created folder: Project (637314b21fea4fa0ab2b0cdf8bd53239) in parent group-v767796. [ 853.511311] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Creating folder: Instances. Parent ref: group-v767956. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.511968] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 853.511968] env[69982]: value = "task-3864658" [ 853.511968] env[69982]: _type = "Task" [ 853.511968] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.513070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.513070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.513070] env[69982]: DEBUG nova.compute.manager [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.513070] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-485b7a51-990b-4422-ac95-d0beaa4bf280 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.516964] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d159e4e-e59d-4c14-b683-30d70e15696e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.532231] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.534413] env[69982]: DEBUG nova.compute.manager [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 853.535327] env[69982]: DEBUG nova.objects.instance [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'flavor' on Instance uuid 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 853.589331] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f83303-def2-e0d6-c785-ed2c8dda504e, 'name': SearchDatastore_Task, 'duration_secs': 0.032457} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.589331] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.589566] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. {{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 853.591878] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13124cca-a2bc-4731-bdb3-1a2871b92e29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.594444] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Created folder: Instances in parent group-v767956. [ 853.594664] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 853.594927] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.595771] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-122cc106-c4f3-48fa-b40b-0dc239ab5906 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.619082] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 853.619082] env[69982]: value = "task-3864660" [ 853.619082] env[69982]: _type = "Task" [ 853.619082] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.625842] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.625842] env[69982]: value = "task-3864661" [ 853.625842] env[69982]: _type = "Task" [ 853.625842] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.635690] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.642370] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864661, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.822664] env[69982]: DEBUG nova.network.neutron [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Updated VIF entry in instance network info cache for port f2644b3d-9f1e-40f2-8c76-55c4ed912488. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 853.824033] env[69982]: DEBUG nova.network.neutron [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Updating instance_info_cache with network_info: [{"id": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "address": "fa:16:3e:50:4c:7c", "network": {"id": "aa8b3408-496f-48d9-a2f9-18cbef4644c3", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1984690010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "637314b21fea4fa0ab2b0cdf8bd53239", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7d2575f-b92f-44ec-a863-634cb76631a2", "external-id": "nsx-vlan-transportzone-794", "segmentation_id": 794, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2644b3d-9f", "ovs_interfaceid": "f2644b3d-9f1e-40f2-8c76-55c4ed912488", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.972697] env[69982]: DEBUG oslo_concurrency.lockutils [None req-34fe6682-9b31-427b-ad72-8ee27bdcb49a tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "d5c23433-a0f3-4f0a-9c62-051d07dcd712" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.911s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.006981] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864656, 'name': ReconfigVM_Task, 'duration_secs': 0.402784} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.007339] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 881cbfae-7630-45e0-a8ad-b2cd283689ea/881cbfae-7630-45e0-a8ad-b2cd283689ea.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.008132] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ec7f5d2-5249-4eab-90b1-88880bc5584d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.016941] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 854.016941] env[69982]: value = "task-3864662" [ 854.016941] env[69982]: _type = "Task" [ 854.016941] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.033096] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864658, 'name': PowerOffVM_Task, 'duration_secs': 0.146223} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.037013] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.037013] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.037247] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864662, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.040048] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef4bfc39-19ba-4838-a2a9-e1a679e45097 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.077562] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.077562] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.077738] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Deleting the datastore file [datastore2] c34355fa-3712-4338-942d-acdb2f8a91ee {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.081126] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a03b65c0-ab63-443d-9d69-9ba2997bacbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.088686] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for the task: (returnval){ [ 854.088686] env[69982]: value = "task-3864664" [ 854.088686] env[69982]: _type = "Task" [ 854.088686] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.101492] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864664, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.136754] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864660, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.143882] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864661, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.326540] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5c158ab-0e43-44cf-96dc-bd4fa25689d9 req-2035063c-7d2e-4eb0-b28a-2ec3ffbdc114 service nova] Releasing lock "refresh_cache-9dcaa045-83c6-4e74-881d-a85a1991dbe3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.537353] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864662, 'name': Rename_Task, 'duration_secs': 0.26679} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.537721] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.537992] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76554182-ee79-4fcc-a48b-55f97e777010 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.546314] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.546669] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 854.546669] env[69982]: value = "task-3864665" [ 854.546669] env[69982]: _type = "Task" [ 854.546669] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.549640] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7ad06ce-93b0-4580-952f-df37324645eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.565251] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864665, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.566141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2e038d-0d71-4f0e-94a6-222032edb23d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.569651] env[69982]: DEBUG oslo_vmware.api [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 854.569651] env[69982]: value = "task-3864666" [ 854.569651] env[69982]: _type = "Task" [ 854.569651] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.579212] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f583e0a1-600a-4ad5-9832-43392bdbbbfd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.587580] env[69982]: DEBUG oslo_vmware.api [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.627778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6a53df-ccc2-4d5d-90db-46f0cd345c80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.635232] env[69982]: DEBUG oslo_vmware.api [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Task: {'id': task-3864664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221318} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.638456] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.638456] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.638658] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.638843] env[69982]: INFO nova.compute.manager [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Took 1.18 seconds to destroy the instance on the hypervisor. [ 854.639190] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.639436] env[69982]: DEBUG nova.compute.manager [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 854.639674] env[69982]: DEBUG nova.network.neutron [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 854.647315] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864660, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.636188} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.648016] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. [ 854.649423] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87874a6-8f9a-4ad0-9d72-a5c8fe230bac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.658692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7863ef-0367-4c4a-9fa9-758327cf3e6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.661434] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864661, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.674318] env[69982]: DEBUG nova.compute.provider_tree [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.697682] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 854.699726] env[69982]: DEBUG nova.scheduler.client.report [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.704105] env[69982]: DEBUG nova.network.neutron [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 854.706246] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4987d043-7a94-42ba-ac78-aca45b460b50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.726626] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.287s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.728848] env[69982]: DEBUG nova.network.neutron [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.730305] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.213s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.730305] env[69982]: DEBUG nova.objects.instance [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 854.740764] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 854.740764] env[69982]: value = "task-3864667" [ 854.740764] env[69982]: _type = "Task" [ 854.740764] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.753365] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.767688] env[69982]: INFO nova.scheduler.client.report [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Deleted allocations for instance 589419ea-c609-45bb-bde5-3b22d9ff111e [ 855.067393] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864665, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.082463] env[69982]: DEBUG oslo_vmware.api [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864666, 'name': PowerOffVM_Task, 'duration_secs': 0.234106} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.082759] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.082759] env[69982]: DEBUG nova.compute.manager [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.083690] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33823c4-ca79-4bb0-9983-d74153d659f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.147972] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864661, 'name': CreateVM_Task, 'duration_secs': 1.405696} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.148180] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.148960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.149147] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.149475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 855.149745] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2a7179-0c91-47d0-b4c0-367373a207d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.157035] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa 
tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 855.157035] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524c9eba-3421-1223-b0d1-6bc7251ca451" [ 855.157035] env[69982]: _type = "Task" [ 855.157035] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.169954] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524c9eba-3421-1223-b0d1-6bc7251ca451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.238031] env[69982]: INFO nova.compute.manager [-] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Took 0.60 seconds to deallocate network for instance. [ 855.265342] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864667, 'name': ReconfigVM_Task, 'duration_secs': 0.509759} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.265342] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Reconfigured VM instance instance-00000034 to attach disk [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 855.266691] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caea2c10-12af-480c-8766-102431672ddf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.306780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1c998248-4bd5-4030-9dfa-aa40c3ec3359 tempest-FloatingIPsAssociationTestJSON-540795601 tempest-FloatingIPsAssociationTestJSON-540795601-project-member] Lock "589419ea-c609-45bb-bde5-3b22d9ff111e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.204s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.308106] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc751cb7-6da0-467a-aa25-fe2ab7569fc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.326510] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 855.326510] env[69982]: value = "task-3864668" [ 855.326510] env[69982]: _type = "Task" [ 855.326510] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.337966] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864668, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.564811] env[69982]: DEBUG oslo_vmware.api [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864665, 'name': PowerOnVM_Task, 'duration_secs': 0.544792} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.564811] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.564811] env[69982]: INFO nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Took 8.58 seconds to spawn the instance on the hypervisor. [ 855.564811] env[69982]: DEBUG nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.564811] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6164ca-8576-47c6-9178-39128a898d88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.600023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fe06d776-bf7a-43aa-b145-c7b3a78cd393 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.086s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.674587] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524c9eba-3421-1223-b0d1-6bc7251ca451, 'name': SearchDatastore_Task, 'duration_secs': 0.030529} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.674971] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.675272] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.675579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.675734] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.676020] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.677352] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-10d8c08f-723d-4512-94ac-cec04f2a6318 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.691654] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.691965] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.693305] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e31761c8-5fdc-4222-b41c-bf57fc833b36 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.703842] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 855.703842] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5240cf20-3a7f-d97a-709e-a5c4ae8af67b" [ 855.703842] env[69982]: _type = "Task" [ 855.703842] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.714655] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5240cf20-3a7f-d97a-709e-a5c4ae8af67b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.748242] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3fcdd57c-9324-46ab-8dee-acfebbf6e335 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.749950] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 34.920s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.750255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.750433] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 855.750827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.403s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.752446] env[69982]: INFO nova.compute.claims [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 
[ 855.757233] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65ee319-3cef-4789-b7c8-7dc1ce5b0183 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.761804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.771554] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4fe3e3-5fd2-4544-b886-df9e5a3d1bd2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.793949] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19620bb-82a4-4584-a738-1bba0bb6e6bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.803046] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe96c7a-527b-4402-8c11-a16101d6a57a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.837396] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178413MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 855.837598] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.847529] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864668, 'name': ReconfigVM_Task, 'duration_secs': 0.471247} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.847710] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 855.848054] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5caa016d-a62c-427b-ae67-0cab5daf3f2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.858533] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 855.858533] env[69982]: value = "task-3864669" [ 855.858533] env[69982]: _type = "Task" [ 855.858533] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.868227] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864669, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.091903] env[69982]: INFO nova.compute.manager [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Took 53.32 seconds to build instance. [ 856.169279] env[69982]: DEBUG nova.objects.instance [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'flavor' on Instance uuid 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.218651] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5240cf20-3a7f-d97a-709e-a5c4ae8af67b, 'name': SearchDatastore_Task, 'duration_secs': 0.013526} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.218651] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d77ac0fe-6729-4b02-858c-a1757208bb70 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.225581] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 856.225581] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5208123f-615d-7160-0e99-08b99d653161" [ 856.225581] env[69982]: _type = "Task" [ 856.225581] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.237226] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5208123f-615d-7160-0e99-08b99d653161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.371310] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864669, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.595465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f986b7fd-dabb-4315-a9ab-1db3de7905b7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.820s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.676240] env[69982]: DEBUG oslo_concurrency.lockutils [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.676410] env[69982]: DEBUG oslo_concurrency.lockutils [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.676584] env[69982]: DEBUG nova.network.neutron [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 856.677399] env[69982]: DEBUG nova.objects.instance [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'info_cache' on Instance uuid 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 856.741650] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5208123f-615d-7160-0e99-08b99d653161, 'name': SearchDatastore_Task, 'duration_secs': 0.012402} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.741964] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.742314] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9dcaa045-83c6-4e74-881d-a85a1991dbe3/9dcaa045-83c6-4e74-881d-a85a1991dbe3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.742602] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e88c6f26-952d-48ae-afb3-dcde4da99a6e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.755747] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 856.755747] env[69982]: value = "task-3864670" [ 856.755747] env[69982]: _type = "Task" [ 856.755747] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.771028] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864670, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.873367] env[69982]: DEBUG oslo_vmware.api [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864669, 'name': PowerOnVM_Task, 'duration_secs': 0.740127} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.876516] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 856.880042] env[69982]: DEBUG nova.compute.manager [None req-17ca9045-e86d-44f3-953f-98b09aa0ec19 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 856.880844] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7279da-2e0c-4865-b838-5bcac1e603a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.070772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "a3e3106d-b7df-49c8-9341-a843977aefe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.071077] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.097560] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 857.183013] env[69982]: DEBUG nova.objects.base [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Object Instance<4597a0b8-2c04-4755-8e0d-e00e5cdaacd7> lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 857.200024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.200024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.200024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.200024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.200720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.201773] env[69982]: INFO nova.compute.manager [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Terminating instance [ 857.277976] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864670, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.406148] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20afaf5-9bd3-447f-b800-c5e594ca7fb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.419144] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c70243-ea3a-42d1-9ec1-1d3ed535bad7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.455268] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2097107-efd9-42c3-bae0-3b10f046a65e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.465042] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482a6629-786d-4164-82da-1d03c78440cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.483874] env[69982]: DEBUG nova.compute.provider_tree [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.633589] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.713267] env[69982]: DEBUG nova.compute.manager [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 857.713665] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.715029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b11e734-f4bd-46ea-ae10-2468f3274f7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.728958] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.729684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c43ada90-8aa2-4b18-a660-87851c8a98e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.739769] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 857.739769] env[69982]: value = "task-3864671" [ 857.739769] env[69982]: _type = "Task" [ 857.739769] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.751282] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864671, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.772860] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864670, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.739474} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.773200] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9dcaa045-83c6-4e74-881d-a85a1991dbe3/9dcaa045-83c6-4e74-881d-a85a1991dbe3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.773430] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.773749] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1df0d24-805a-48e6-ae6f-c7817f8d1b47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.784047] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 857.784047] env[69982]: value = "task-3864672" [ 857.784047] env[69982]: _type = "Task" [ 857.784047] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.796064] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864672, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.988102] env[69982]: DEBUG nova.scheduler.client.report [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.031402] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.031559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.031737] env[69982]: INFO nova.compute.manager [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Shelving [ 858.204323] env[69982]: DEBUG nova.network.neutron [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.250691] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864671, 'name': PowerOffVM_Task, 'duration_secs': 0.267119} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.250978] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 858.251165] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 858.251420] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-da69d669-f669-4644-95bb-74e328a3a683 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.295051] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864672, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113928} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.295544] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 858.297366] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ba065f-3950-4cc8-8d46-90cab5e88d6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.329048] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 9dcaa045-83c6-4e74-881d-a85a1991dbe3/9dcaa045-83c6-4e74-881d-a85a1991dbe3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.330610] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c6c6e23-5cb0-4e91-add8-61d8fb73efb9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.347797] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 858.348718] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 858.348718] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Deleting the datastore file [datastore2] a8217447-bc22-4b84-925f-c3c09fb7228c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 858.350022] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cab0d081-e319-4672-84a7-d7f3acfc6639 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.358454] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 858.358454] env[69982]: value = "task-3864675" [ 858.358454] env[69982]: _type = "Task" [ 858.358454] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.360639] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for the task: (returnval){ [ 858.360639] env[69982]: value = "task-3864674" [ 858.360639] env[69982]: _type = "Task" [ 858.360639] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.372770] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.376410] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864675, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.497937] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.501438] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 858.503845] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.902s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.504449] env[69982]: DEBUG nova.objects.instance [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lazy-loading 'resources' on Instance uuid fae97132-44b4-4df1-bd34-ba694ea7016a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.708455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.874972] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864675, 'name': ReconfigVM_Task, 'duration_secs': 0.319582} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.878414] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 9dcaa045-83c6-4e74-881d-a85a1991dbe3/9dcaa045-83c6-4e74-881d-a85a1991dbe3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.879230] env[69982]: DEBUG oslo_vmware.api [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Task: {'id': task-3864674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269555} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.879462] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3d98782-a944-40d8-aff3-ff59ea438dc5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.881515] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.881627] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.881906] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.882026] env[69982]: INFO nova.compute.manager [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Took 1.17 seconds to destroy the instance on the hypervisor. [ 858.882247] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 858.882465] env[69982]: DEBUG nova.compute.manager [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 858.882655] env[69982]: DEBUG nova.network.neutron [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 858.891803] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 858.891803] env[69982]: value = "task-3864676" [ 858.891803] env[69982]: _type = "Task" [ 858.891803] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.902135] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864676, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.008236] env[69982]: DEBUG nova.compute.utils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 859.010382] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 859.010594] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.048904] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.049226] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e57d56a4-c6fb-4f0d-83a0-39a6c1581264 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.059077] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 859.059077] env[69982]: value = "task-3864677" [ 859.059077] env[69982]: _type = "Task" [ 859.059077] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.070411] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864677, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.113397] env[69982]: DEBUG nova.policy [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5eb7bc6d38764da0a4f06fc1c16a2a62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8cfacb73f8e46db9147929df2af33e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 859.406786] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864676, 'name': Rename_Task, 'duration_secs': 0.152678} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.410617] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.411102] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69c86a1f-c471-4b57-ac94-04410c097567 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.420494] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 859.420494] env[69982]: value = "task-3864678" [ 859.420494] env[69982]: _type = "Task" [ 859.420494] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.436210] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.520966] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 859.574386] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864677, 'name': PowerOffVM_Task, 'duration_secs': 0.400709} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.575538] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.576699] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70d4657-c641-40b6-8a90-ec985e960fc7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.609791] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f0ab61-79d3-44f9-ba53-19804be51343 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.645621] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Successfully created port: 0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.649694] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4a7d55-aa58-4fd6-9646-5fbf7efb179a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.656961] env[69982]: DEBUG nova.compute.manager [req-7153c5ac-019e-4f83-97a2-cdac5999eec7 req-30a47829-2dd0-4854-8561-a6361f02bee1 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Received event network-vif-deleted-79537ce6-7e7e-4621-b2a8-e38c01d51f7d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 859.657267] env[69982]: INFO nova.compute.manager [req-7153c5ac-019e-4f83-97a2-cdac5999eec7 req-30a47829-2dd0-4854-8561-a6361f02bee1 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Neutron deleted interface 79537ce6-7e7e-4621-b2a8-e38c01d51f7d; detaching it from the instance and deleting it from the info cache [ 859.657401] env[69982]: DEBUG nova.network.neutron [req-7153c5ac-019e-4f83-97a2-cdac5999eec7 req-30a47829-2dd0-4854-8561-a6361f02bee1 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.662582] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9628b90d-fae7-4204-8af1-331acc644091 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.700095] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d696ca3-6cb8-4cb5-bf58-9e2d203913ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.709512] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3c3d48-fc51-4471-9ec9-5baa908351d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.726032] env[69982]: DEBUG 
nova.compute.provider_tree [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.727909] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.728183] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ab06238-81ef-409d-917c-274744874e94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.739221] env[69982]: DEBUG oslo_vmware.api [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 859.739221] env[69982]: value = "task-3864679" [ 859.739221] env[69982]: _type = "Task" [ 859.739221] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.747220] env[69982]: DEBUG oslo_vmware.api [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864679, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.939851] env[69982]: DEBUG oslo_vmware.api [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864678, 'name': PowerOnVM_Task, 'duration_secs': 0.507648} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.940021] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.940376] env[69982]: INFO nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Took 9.10 seconds to spawn the instance on the hypervisor. 
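The entries around this point all follow the same asynchronous oslo.vmware pattern: a vSphere method such as PowerOnVM_Task, CopyVirtualDisk_Task, ReconfigVM_Task or CreateSnapshot_Task is invoked, immediately returns a Task managed object, and wait_for_task() then polls it (the recurring "_poll_task ... progress is N%" lines) until the task reports success. The short sketch below is illustrative only, assuming placeholder host, credentials and function names; it is not Nova's actual helper code.

    # Minimal sketch of the task-wait pattern visible in the log above.
    # Names, host and credentials are illustrative placeholders.
    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        # PowerOnVM_Task is asynchronous and returns a Task managed-object
        # reference right away.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Blocks while polling the task (progress is logged periodically)
        # and raises if the task ends in an error or cancelled state.
        return session.wait_for_task(task)

    # Hypothetical session; task_poll_interval controls how often the task
    # is polled, which is what produces the periodic "_poll_task" entries.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

Other asynchronous calls in this log (disk copy, disk extend, reconfigure, snapshot, datastore file delete) are driven the same way; only the invoked method name and its arguments differ.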
[ 859.940690] env[69982]: DEBUG nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 859.942692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695e75a5-8c02-4ace-9d1e-2494e30ea7de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.120702] env[69982]: DEBUG nova.network.neutron [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.128048] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 860.128437] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f2defce4-521a-4094-9510-1aa330c623e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.139162] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 860.139162] env[69982]: value = "task-3864680" [ 860.139162] env[69982]: _type = "Task" [ 860.139162] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.150450] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864680, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.159954] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e95cede3-f05e-4cab-aef2-56ab7c8c2608 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.174583] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9531a38f-e4eb-4759-9940-5fdd3d5c5da8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.216671] env[69982]: DEBUG nova.compute.manager [req-7153c5ac-019e-4f83-97a2-cdac5999eec7 req-30a47829-2dd0-4854-8561-a6361f02bee1 service nova] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Detach interface failed, port_id=79537ce6-7e7e-4621-b2a8-e38c01d51f7d, reason: Instance a8217447-bc22-4b84-925f-c3c09fb7228c could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 860.229411] env[69982]: DEBUG nova.scheduler.client.report [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 860.249179] env[69982]: DEBUG oslo_vmware.api [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864679, 'name': PowerOnVM_Task, 'duration_secs': 0.492832} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.249469] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.251754] env[69982]: DEBUG nova.compute.manager [None req-53175f8a-2640-44f8-8e40-ba444c7b19fc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.251754] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17fb718-c483-419d-9dc0-3f6b8ad00015 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.466500] env[69982]: INFO nova.compute.manager [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Took 54.69 seconds to build instance. [ 860.534966] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 860.564900] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 860.565288] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.565460] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 860.565656] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.566076] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 860.566266] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 860.566486] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 860.566647] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 860.566815] 
env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 860.566998] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 860.567221] env[69982]: DEBUG nova.virt.hardware [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 860.568139] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2344f20e-c7db-4422-843d-b303a65342ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.577122] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0136ee-e52e-46c2-ac6d-de31990cf5fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.581598] env[69982]: INFO nova.compute.manager [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Rescuing [ 860.581854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.582027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.582188] env[69982]: DEBUG nova.network.neutron [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 860.622661] env[69982]: INFO nova.compute.manager [-] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Took 1.74 seconds to deallocate network for instance. [ 860.651148] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864680, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.737271] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.231s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.738417] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.625s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.740172] env[69982]: INFO nova.compute.claims [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.806101] env[69982]: INFO nova.scheduler.client.report [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Deleted allocations for instance fae97132-44b4-4df1-bd34-ba694ea7016a [ 860.969907] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9c6a2de8-8c3c-403f-a5d7-10d0c0147bfa tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.218s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.133771] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.159295] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864680, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.316039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-397aa7e5-ee9e-4fe1-b6c2-78d866bae759 tempest-ServersListShow2100Test-1423413313 tempest-ServersListShow2100Test-1423413313-project-member] Lock "fae97132-44b4-4df1-bd34-ba694ea7016a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.934s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.382302] env[69982]: DEBUG nova.network.neutron [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updating instance_info_cache with network_info: [{"id": "b5a29604-4742-41d4-b219-8bbd59089c94", "address": "fa:16:3e:5d:73:4d", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a29604-47", "ovs_interfaceid": "b5a29604-4742-41d4-b219-8bbd59089c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.475583] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 861.653572] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864680, 'name': CreateSnapshot_Task, 'duration_secs': 1.355399} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.653845] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 861.655125] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9ad52a-89c0-4e56-b47c-e6b8f9fbc65e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.667436] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.667690] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.669020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.669020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.669020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.673288] env[69982]: INFO nova.compute.manager [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Terminating instance [ 861.701116] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 
43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Successfully updated port: 0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.703661] env[69982]: DEBUG nova.compute.manager [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Received event network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.703877] env[69982]: DEBUG oslo_concurrency.lockutils [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] Acquiring lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.704106] env[69982]: DEBUG oslo_concurrency.lockutils [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.704272] env[69982]: DEBUG oslo_concurrency.lockutils [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.704437] env[69982]: DEBUG nova.compute.manager [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] No waiting events found dispatching network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 861.704608] env[69982]: WARNING nova.compute.manager [req-4cd472a6-bdbb-4d16-9a54-82cd8b42f365 req-e7dec9e9-e143-4c5a-b10c-36b9e877d1e7 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Received unexpected event network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8 for instance with vm_state building and task_state spawning. 
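The warning just above ("Received unexpected event network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8 for instance with vm_state building and task_state spawning") is emitted when Neutron delivers the vif-plugged notification before the spawning thread has registered a waiter for it, so the event pop finds nothing to dispatch to. Below is a minimal, hypothetical Python sketch of that waiter/dispatch pattern; the names (InstanceEventWaiter, prepare, dispatch) are illustrative assumptions, not Nova's actual implementation.

import threading
from collections import defaultdict


class InstanceEventWaiter:
    """Toy registry of expected external events, keyed by (instance, event name)."""

    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event before triggering the external action."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        """Deliver an incoming event; warn if nobody was waiting for it."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # Mirrors the "Received unexpected event" warning seen in the log.
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True


# Usage sketch: prepare() before plugging the VIF, dispatch() from the external
# event handler, then block on the returned Event with a timeout during spawn.
waiter = InstanceEventWaiter()
ev = waiter.prepare("43a8e7dd-843b-49f6-9edb-60c2b380e9c2",
                    "network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8")
waiter.dispatch("43a8e7dd-843b-49f6-9edb-60c2b380e9c2",
                "network-vif-plugged-0f022a40-3503-463d-a390-efa4ea123ef8")
ev.wait(timeout=300)

If dispatch() arrives first, as in the log above, the real service simply logs the warning and relies on the instance's network info cache refresh instead; the sketch returns False in that case.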
[ 861.887769] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.006040] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.183151] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 862.183912] env[69982]: DEBUG nova.compute.manager [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 862.184131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.184387] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-37b3aceb-9f5f-49dd-8dba-91846b3197f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.191365] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e107e888-dfaa-47cc-a76b-90c8244cb236 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.200762] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.201953] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ce16ee8-3cf1-4a1f-adb8-513a17f104bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.206880] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 862.206880] env[69982]: value = "task-3864681" [ 862.206880] env[69982]: _type = "Task" [ 862.206880] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.211142] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.211296] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.211626] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 862.219951] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 862.219951] env[69982]: value = "task-3864682" [ 862.219951] env[69982]: _type = "Task" [ 862.219951] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.230541] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864681, 'name': CloneVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.237907] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864682, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.413728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05cf3ec8-264f-4cee-80b9-5b7a33d2c5e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.422931] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6f9387-3302-4288-a611-a5e3c3c443ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.463676] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e000153f-53fd-4464-8330-6e83320f4f5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.474623] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-005e72db-357f-45ba-a8f7-e6e0a41a6ec5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.491783] env[69982]: DEBUG nova.compute.provider_tree [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 862.728229] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864681, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.738559] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864682, 'name': PowerOffVM_Task, 'duration_secs': 0.230079} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.738868] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.739489] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.739489] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9bc7fd26-d326-47db-b893-0535e00f24b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.775755] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 862.807634] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.808228] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.810320] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Deleting the datastore file [datastore2] 9dcaa045-83c6-4e74-881d-a85a1991dbe3 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.812479] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17a1f025-c39e-4794-8b13-953efb4987aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.822637] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for the task: (returnval){ [ 862.822637] env[69982]: value = "task-3864684" [ 862.822637] env[69982]: _type = "Task" [ 862.822637] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.831798] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864684, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.958396] env[69982]: DEBUG nova.network.neutron [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Updating instance_info_cache with network_info: [{"id": "0f022a40-3503-463d-a390-efa4ea123ef8", "address": "fa:16:3e:20:8e:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f022a40-35", "ovs_interfaceid": "0f022a40-3503-463d-a390-efa4ea123ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.996471] env[69982]: DEBUG nova.scheduler.client.report [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.225747] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864681, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.338068] env[69982]: DEBUG oslo_vmware.api [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Task: {'id': task-3864684, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372785} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.338465] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.338738] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.338982] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.342512] env[69982]: INFO nova.compute.manager [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Took 1.16 seconds to destroy the instance on the hypervisor. [ 863.343456] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.345830] env[69982]: DEBUG nova.compute.manager [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.345830] env[69982]: DEBUG nova.network.neutron [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 863.461919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.462362] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Instance network_info: |[{"id": "0f022a40-3503-463d-a390-efa4ea123ef8", "address": "fa:16:3e:20:8e:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f022a40-35", "ovs_interfaceid": "0f022a40-3503-463d-a390-efa4ea123ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 863.462829] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:8e:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f022a40-3503-463d-a390-efa4ea123ef8', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.472040] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating folder: Project (c8cfacb73f8e46db9147929df2af33e0). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.472606] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 863.472848] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5753363-943a-424b-a710-65c7373923cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.474741] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-828d1652-64a4-400f-a5fc-700b2539f014 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.483347] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 863.483347] env[69982]: value = "task-3864686" [ 863.483347] env[69982]: _type = "Task" [ 863.483347] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.489456] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Created folder: Project (c8cfacb73f8e46db9147929df2af33e0) in parent group-v767796. [ 863.489456] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating folder: Instances. Parent ref: group-v767961. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.490037] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f5d28a9-710a-4431-9c41-ee48eab14498 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.499120] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864686, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.504567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.505098] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 863.510889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.035s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.511673] env[69982]: DEBUG nova.objects.instance [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lazy-loading 'resources' on Instance uuid 2d554902-bf28-4ee2-b9d6-4219e54246fc {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.513418] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Created folder: Instances in parent group-v767961. [ 863.517021] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.517021] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.517021] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95985bd6-5191-4081-ae3b-53e2e8157ad2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.544680] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.544680] env[69982]: value = "task-3864688" [ 863.544680] env[69982]: _type = "Task" [ 863.544680] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.556737] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864688, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.656702] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "67613f71-a91e-4dae-8a6c-cd74c4821339" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.656939] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.726923] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864681, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.741963] env[69982]: DEBUG nova.compute.manager [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Received event network-changed-0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 863.743469] env[69982]: DEBUG nova.compute.manager [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Refreshing instance network info cache due to event network-changed-0f022a40-3503-463d-a390-efa4ea123ef8. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 863.743736] env[69982]: DEBUG oslo_concurrency.lockutils [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] Acquiring lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.743896] env[69982]: DEBUG oslo_concurrency.lockutils [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] Acquired lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.744082] env[69982]: DEBUG nova.network.neutron [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Refreshing network info cache for port 0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 863.852334] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.852617] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.852837] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.853202] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.853428] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.855854] env[69982]: INFO 
nova.compute.manager [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Terminating instance [ 863.996436] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864686, 'name': PowerOffVM_Task, 'duration_secs': 0.294551} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.996436] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 863.997293] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b92df36-3024-4ac6-9a26-4d8b413be8f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.029314] env[69982]: DEBUG nova.compute.utils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.034832] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.035034] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.038435] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f64da4-8611-4b5b-bfef-bf936c188d9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.074182] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864688, 'name': CreateVM_Task, 'duration_secs': 0.374981} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.075050] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.075439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.076450] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.076865] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 864.077583] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6940fb2e-766c-4336-9956-2961e976f37a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.094476] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 864.094476] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52723c80-f755-e97b-d177-5f79627328ae" [ 864.094476] env[69982]: _type = "Task" [ 864.094476] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.100109] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.100842] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58893ac1-fded-4c1d-9dc2-8b19f278f809 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.109746] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52723c80-f755-e97b-d177-5f79627328ae, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.114814] env[69982]: DEBUG nova.policy [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5eb7bc6d38764da0a4f06fc1c16a2a62', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c8cfacb73f8e46db9147929df2af33e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 864.116561] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.117093] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.117736] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.117955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.118279] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.118536] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 864.118536] env[69982]: value = "task-3864689" [ 864.118536] env[69982]: _type = "Task" [ 864.118536] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.119254] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be558242-99f0-4fee-b56c-fd9554775c98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.133969] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 864.134210] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.134431] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.136392] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.136392] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.137213] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8e6499e-f461-429d-9603-684d4201bbeb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.148027] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 864.148027] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d88e2-1799-1c9f-09c9-b84b8d1b8fcf" [ 864.148027] env[69982]: _type = "Task" [ 864.148027] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.158767] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d88e2-1799-1c9f-09c9-b84b8d1b8fcf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.226547] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864681, 'name': CloneVM_Task, 'duration_secs': 1.772887} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.227403] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Created linked-clone VM from snapshot [ 864.227721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c9039a-fa6b-4a3b-bdde-32afa06f77d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.236634] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Uploading image f06c75b9-f2af-4fdf-8aa4-851297d650bd {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 864.251014] env[69982]: DEBUG nova.network.neutron [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.269570] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 864.269570] env[69982]: value = "vm-767960" [ 864.269570] env[69982]: _type = "VirtualMachine" [ 864.269570] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 864.269999] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ca5e9a87-a082-4bd6-8c9b-e5e7941482b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.284470] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lease: (returnval){ [ 864.284470] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e99300-d06d-e79f-95ef-db6b2df1fe6c" [ 864.284470] env[69982]: _type = "HttpNfcLease" [ 864.284470] env[69982]: } obtained for exporting VM: (result){ [ 864.284470] env[69982]: value = "vm-767960" [ 864.284470] env[69982]: _type = "VirtualMachine" [ 864.284470] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 864.284715] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the lease: (returnval){ [ 864.284715] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e99300-d06d-e79f-95ef-db6b2df1fe6c" [ 864.284715] env[69982]: _type = "HttpNfcLease" [ 864.284715] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 864.294492] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 864.294492] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e99300-d06d-e79f-95ef-db6b2df1fe6c" [ 864.294492] env[69982]: _type = "HttpNfcLease" [ 864.294492] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 864.362195] env[69982]: DEBUG nova.compute.manager [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 864.362195] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 864.363302] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6854417-ea6f-4251-8653-24cf876b31f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.380369] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 864.380369] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-665f115c-a577-484b-9b26-8336abb7675b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.394230] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 864.394230] env[69982]: value = "task-3864691" [ 864.394230] env[69982]: _type = "Task" [ 864.394230] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.420089] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864691, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.543995] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 864.604530] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Successfully created port: f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 864.639479] env[69982]: DEBUG nova.network.neutron [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Updated VIF entry in instance network info cache for port 0f022a40-3503-463d-a390-efa4ea123ef8. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 864.639479] env[69982]: DEBUG nova.network.neutron [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Updating instance_info_cache with network_info: [{"id": "0f022a40-3503-463d-a390-efa4ea123ef8", "address": "fa:16:3e:20:8e:2b", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f022a40-35", "ovs_interfaceid": "0f022a40-3503-463d-a390-efa4ea123ef8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.659248] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d88e2-1799-1c9f-09c9-b84b8d1b8fcf, 'name': SearchDatastore_Task, 'duration_secs': 0.016143} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.663638] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ef6295c-8004-4abd-968f-7003d21247f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.677575] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 864.677575] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526c4e4d-0105-e29e-16ed-366022441e4c" [ 864.677575] env[69982]: _type = "Task" [ 864.677575] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.678305] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.678586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.691743] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526c4e4d-0105-e29e-16ed-366022441e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.012228} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.694638] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 864.694977] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 43a8e7dd-843b-49f6-9edb-60c2b380e9c2/43a8e7dd-843b-49f6-9edb-60c2b380e9c2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 864.695488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.695686] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.695953] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b914747e-996a-48b9-8405-3548758ec901 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.700359] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d32b8107-0bb6-4ad5-8abc-0853cd0dc4b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.707516] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 864.707516] env[69982]: value = "task-3864692" [ 864.707516] env[69982]: _type = "Task" [ 864.707516] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.710278] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.710278] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.715486] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b6e5911-0afc-4a8c-bed6-2c0fecb9eaa6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.723865] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864692, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.728411] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 864.728411] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520cb2ce-b083-3353-ee98-82d2726b0a0d" [ 864.728411] env[69982]: _type = "Task" [ 864.728411] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.739597] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520cb2ce-b083-3353-ee98-82d2726b0a0d, 'name': SearchDatastore_Task, 'duration_secs': 0.012166} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.742551] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4354efb-c667-4eed-8c9e-774cf7e53547 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.749108] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 864.749108] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a0855-fce1-ebd2-ddcc-c36e08f31501" [ 864.749108] env[69982]: _type = "Task" [ 864.749108] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.760203] env[69982]: INFO nova.compute.manager [-] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Took 1.41 seconds to deallocate network for instance. [ 864.760682] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a0855-fce1-ebd2-ddcc-c36e08f31501, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.789449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9675a3-96d4-4553-ba96-00aabb83c662 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.802963] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 864.802963] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e99300-d06d-e79f-95ef-db6b2df1fe6c" [ 864.802963] env[69982]: _type = "HttpNfcLease" [ 864.802963] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 864.803443] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 864.803443] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e99300-d06d-e79f-95ef-db6b2df1fe6c" [ 864.803443] env[69982]: _type = "HttpNfcLease" [ 864.803443] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 864.804595] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a7827-dbb6-4231-867c-2facdfeae342 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.810020] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6306fc08-0d3c-4c5c-ab7e-735f7bf1eb72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.819793] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 864.819852] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk for reading. 
{{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 864.850023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2119ed6e-7df8-4652-82c0-160817aba9a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.918436] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7271564d-ba21-448a-8c97-61f69bede6e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.928238] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864691, 'name': PowerOffVM_Task, 'duration_secs': 0.214668} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.928238] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 864.928238] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 864.928238] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ca6bc90-fc0f-47e6-8caa-abeafa8ecdd1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.941935] env[69982]: DEBUG nova.compute.provider_tree [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.022159] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 865.022812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 865.022812] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleting the datastore file [datastore1] 
930c8740-5ad1-4491-8dd6-1a568eaa6f62 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 865.023145] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13bedb97-dca2-4065-8d0f-593ab0608143 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.032983] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 865.032983] env[69982]: value = "task-3864694" [ 865.032983] env[69982]: _type = "Task" [ 865.032983] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.043764] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864694, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.147571] env[69982]: DEBUG oslo_concurrency.lockutils [req-b25bc812-9322-405a-98ae-5424dea456b9 req-44f943ba-f3e3-4353-870c-dacaebb1bc5a service nova] Releasing lock "refresh_cache-43a8e7dd-843b-49f6-9edb-60c2b380e9c2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.201856] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7d498c5e-e6cd-44dc-be64-600efa2ff220 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.220639] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864692, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.260255] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524a0855-fce1-ebd2-ddcc-c36e08f31501, 'name': SearchDatastore_Task, 'duration_secs': 0.011038} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.260492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.260837] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. {{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 865.261331] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a8f638b-15f1-4421-9a59-37c1f9e95ae5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.272054] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.272474] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 865.272474] env[69982]: value = "task-3864695" [ 865.272474] env[69982]: _type = "Task" [ 865.272474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.281928] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864695, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.446189] env[69982]: DEBUG nova.scheduler.client.report [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 865.545923] env[69982]: DEBUG oslo_vmware.api [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864694, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.359254} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.546265] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 865.546402] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 865.546578] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 865.546753] env[69982]: INFO nova.compute.manager [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Took 1.18 seconds to destroy the instance on the hypervisor. [ 865.547174] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 865.547436] env[69982]: DEBUG nova.compute.manager [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 865.547539] env[69982]: DEBUG nova.network.neutron [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 865.556049] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 865.583101] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 865.583406] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.583566] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 865.583750] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.583895] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 865.584184] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 865.584472] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 865.584682] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 865.584899] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 865.585193] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 865.585404] env[69982]: DEBUG nova.virt.hardware [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 865.586424] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4886b3bc-59c3-4f9d-8a3d-c84d1e8f5d1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.596489] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fff93b-3c31-4f99-83dc-6e7198deda91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.733826] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.580208} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.734105] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 43a8e7dd-843b-49f6-9edb-60c2b380e9c2/43a8e7dd-843b-49f6-9edb-60c2b380e9c2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 865.734551] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 865.734967] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87621716-c67d-48e8-9bcc-e3eba6321e97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.745718] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 865.745718] env[69982]: value = "task-3864696" [ 865.745718] env[69982]: _type = "Task" [ 865.745718] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.756166] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864696, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.789202] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864695, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.871389] env[69982]: DEBUG nova.compute.manager [req-d24a4aba-237c-4473-a6fc-548ac81c5eb8 req-caebaa0e-59ee-41a1-8e77-f5395e627636 service nova] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Received event network-vif-deleted-f2644b3d-9f1e-40f2-8c76-55c4ed912488 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.953031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.440s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.954251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.890s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.956940] env[69982]: INFO nova.compute.claims [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.983020] env[69982]: INFO nova.scheduler.client.report [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Deleted allocations for instance 2d554902-bf28-4ee2-b9d6-4219e54246fc [ 866.257163] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100937} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.257819] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 866.259222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4717181-9ddb-45d4-9ded-019892696448 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.287878] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] 43a8e7dd-843b-49f6-9edb-60c2b380e9c2/43a8e7dd-843b-49f6-9edb-60c2b380e9c2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.291911] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a90a5e28-4f3a-4d38-abcc-899a8ac48d98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.317642] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768255} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.319336] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. [ 866.320780] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 866.320780] env[69982]: value = "task-3864697" [ 866.320780] env[69982]: _type = "Task" [ 866.320780] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.321751] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a9e994-7d82-4e7d-8d9b-caa54c6fd759 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.357412] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 866.357777] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864697, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.358797] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Successfully updated port: f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.360356] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1792c97-18d9-4517-8685-852f01186458 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.376109] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.376370] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.376716] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 866.386216] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 866.386216] env[69982]: value = "task-3864698" [ 866.386216] env[69982]: _type = "Task" [ 866.386216] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.399045] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864698, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.491804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4f404465-71b0-4fa8-a2ca-89a3863b6c39 tempest-SecurityGroupsTestJSON-991727273 tempest-SecurityGroupsTestJSON-991727273-project-member] Lock "2d554902-bf28-4ee2-b9d6-4219e54246fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 42.573s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.659397] env[69982]: DEBUG nova.compute.manager [req-73811d89-3901-4a1f-a005-f08c9e82276f req-f7c104da-2b2d-43af-80fb-f3db23788e22 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Received event network-vif-deleted-066271e7-f03a-48d7-a4a4-df17ef2b24f4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 866.659746] env[69982]: INFO nova.compute.manager [req-73811d89-3901-4a1f-a005-f08c9e82276f req-f7c104da-2b2d-43af-80fb-f3db23788e22 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Neutron deleted interface 066271e7-f03a-48d7-a4a4-df17ef2b24f4; detaching it from the instance and deleting it from the info cache [ 866.659835] env[69982]: DEBUG nova.network.neutron [req-73811d89-3901-4a1f-a005-f08c9e82276f req-f7c104da-2b2d-43af-80fb-f3db23788e22 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.836898] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864697, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.900491] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864698, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.917675] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 866.984083] env[69982]: DEBUG nova.network.neutron [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.146166] env[69982]: DEBUG nova.network.neutron [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Updating instance_info_cache with network_info: [{"id": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "address": "fa:16:3e:1b:df:da", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05f6887-5e", "ovs_interfaceid": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.172557] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88df718b-9c14-4779-8cf0-6fc221aeb2b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.191328] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80aa0b42-be6f-4ed8-af29-463a72e78f69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.238860] env[69982]: DEBUG nova.compute.manager [req-73811d89-3901-4a1f-a005-f08c9e82276f req-f7c104da-2b2d-43af-80fb-f3db23788e22 service nova] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Detach interface failed, port_id=066271e7-f03a-48d7-a4a4-df17ef2b24f4, reason: Instance 930c8740-5ad1-4491-8dd6-1a568eaa6f62 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 867.339165] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864697, 'name': ReconfigVM_Task, 'duration_secs': 0.586686} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.339514] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Reconfigured VM instance instance-00000037 to attach disk [datastore2] 43a8e7dd-843b-49f6-9edb-60c2b380e9c2/43a8e7dd-843b-49f6-9edb-60c2b380e9c2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.340194] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62174837-3129-40f0-938f-a9f6c6263f73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.350171] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 867.350171] env[69982]: value = "task-3864699" [ 867.350171] env[69982]: _type = "Task" [ 867.350171] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.360980] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864699, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.399944] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864698, 'name': ReconfigVM_Task, 'duration_secs': 0.660316} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.400165] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfigured VM instance instance-00000033 to attach disk [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 867.401286] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c7cf34-e4f3-401c-a4e2-22fda0da655d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.436239] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd08b98b-8b6d-4b61-b121-ab330c7ca57d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.452716] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 867.452716] env[69982]: value = "task-3864700" [ 867.452716] env[69982]: _type = "Task" [ 867.452716] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.463556] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864700, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.487688] env[69982]: INFO nova.compute.manager [-] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Took 1.94 seconds to deallocate network for instance. 
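(Aside, not part of the captured log: the recurring "Waiting for the task ... to complete" and "Task: {...} progress is N%" entries above come from oslo.vmware's task-polling loop, which the Nova vmwareapi driver runs for every asynchronous vSphere call such as PowerOffVM_Task, CopyVirtualDisk_Task and ReconfigVM_Task. The minimal Python sketch below shows that same pattern in isolation; the vCenter host name and credentials are placeholders, and the get_objects lookup merely stands in for the PropertyCollector queries the driver performs, so treat it as an illustration rather than driver code.)

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials -- illustrative only, not from this run.
    session = api.VMwareAPISession(
        'vc.example.test', 'administrator', 'secret',
        api_retry_count=3,
        task_poll_interval=1.0)  # comparable to the cadence of the _poll_task lines

    # Fetch up to 100 VirtualMachine managed-object references; the driver gathers
    # the same kind of data via the PropertyCollector.RetrievePropertiesEx calls
    # seen throughout the log.
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'VirtualMachine', 100)
    vm_ref = result.objects[0].obj  # assumes at least one VM exists

    # Asynchronous vSphere methods return a Task moref immediately; wait_for_task()
    # then polls it until success or error, which is what produces lines like
    # "Task: {'id': task-..., 'name': PowerOffVM_Task} progress is 0%".
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

(End of aside; the log continues below.)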
[ 867.606566] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086400b7-4d34-4b9f-aa1b-7f64d45f880d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.616229] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ddc49b-8a39-4aa0-8fdf-ba7bac22a86a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.650210] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d82dd5-acdd-48aa-b143-a17fffda7517 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.655652] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.655652] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Instance network_info: |[{"id": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "address": "fa:16:3e:1b:df:da", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05f6887-5e", "ovs_interfaceid": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.655863] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:df:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.664340] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 
tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.665192] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.665507] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec05f6de-a40d-4264-bd2d-729cb5c4524a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.685292] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e60f25-16f3-49da-b857-90aafcdd397d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.692893] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.692893] env[69982]: value = "task-3864701" [ 867.692893] env[69982]: _type = "Task" [ 867.692893] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.707323] env[69982]: DEBUG nova.compute.provider_tree [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 867.715917] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864701, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.864396] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864699, 'name': Rename_Task, 'duration_secs': 0.214517} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.864714] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.865011] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74f1c60f-c66d-4cc2-befb-2caf5c003f7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.874022] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 867.874022] env[69982]: value = "task-3864702" [ 867.874022] env[69982]: _type = "Task" [ 867.874022] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.883703] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864702, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.961683] env[69982]: DEBUG nova.compute.manager [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Received event network-vif-plugged-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.961911] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Acquiring lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.963193] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.963365] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.963567] env[69982]: DEBUG nova.compute.manager [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] No waiting events found dispatching network-vif-plugged-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 867.963746] env[69982]: WARNING nova.compute.manager [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Received unexpected event network-vif-plugged-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 for instance with vm_state building and task_state spawning. [ 867.963992] env[69982]: DEBUG nova.compute.manager [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Received event network-changed-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.964585] env[69982]: DEBUG nova.compute.manager [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Refreshing instance network info cache due to event network-changed-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 867.964847] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Acquiring lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.964995] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Acquired lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.965242] env[69982]: DEBUG nova.network.neutron [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Refreshing network info cache for port f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 867.975445] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864700, 'name': ReconfigVM_Task, 'duration_secs': 0.205756} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.975445] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 867.975698] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f18232f-d681-43e8-93f4-eb9d962a554c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.984890] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 867.984890] env[69982]: value = "task-3864703" [ 867.984890] env[69982]: _type = "Task" [ 867.984890] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.997297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.997636] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864703, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.206320] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864701, 'name': CreateVM_Task, 'duration_secs': 0.43693} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.206731] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.207922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.208347] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.208908] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.209363] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-110d85e8-4c77-4ec2-a274-2a4d33357b2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.213027] env[69982]: DEBUG nova.scheduler.client.report [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 868.223477] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 868.223477] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202639a-1d5f-d883-ff3f-8f41319d12b8" [ 868.223477] env[69982]: _type = "Task" [ 868.223477] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.231794] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202639a-1d5f-d883-ff3f-8f41319d12b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.389487] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864702, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.496754] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864703, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.719384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.720371] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 868.723701] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.990s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.725665] env[69982]: INFO nova.compute.claims [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 868.744960] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202639a-1d5f-d883-ff3f-8f41319d12b8, 'name': SearchDatastore_Task, 'duration_secs': 0.031206} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.745932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.745932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.746081] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.746244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.746603] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.749117] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-058d0ed2-5555-4a04-9eb3-cb1f62bb4c21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.759009] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.759230] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.760081] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c3c0581-fc6d-4e87-a219-6e107e88ecb6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.768554] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 868.768554] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52414a12-8170-9ce4-f2cc-013438a681ef" [ 868.768554] env[69982]: _type = "Task" [ 868.768554] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.781153] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52414a12-8170-9ce4-f2cc-013438a681ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.782833] env[69982]: DEBUG nova.network.neutron [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Updated VIF entry in instance network info cache for port f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 868.783095] env[69982]: DEBUG nova.network.neutron [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Updating instance_info_cache with network_info: [{"id": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "address": "fa:16:3e:1b:df:da", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.170", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf05f6887-5e", "ovs_interfaceid": "f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.900617] env[69982]: DEBUG oslo_vmware.api [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864702, 'name': PowerOnVM_Task, 'duration_secs': 0.976747} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.900617] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 868.900617] env[69982]: INFO nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 8.36 seconds to spawn the instance on the hypervisor. [ 868.900617] env[69982]: DEBUG nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 868.900617] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48353b4d-7a5d-46e1-8b67-430ab7178ed4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.998918] env[69982]: DEBUG oslo_vmware.api [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864703, 'name': PowerOnVM_Task, 'duration_secs': 0.742386} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.999214] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.002175] env[69982]: DEBUG nova.compute.manager [None req-b14f14a2-d688-4f72-b86b-12ba2c2d208e tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 869.003033] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c80525-0903-4655-b9eb-e4868dca1948 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.093963] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.093963] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.234565] env[69982]: DEBUG nova.compute.utils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.235517] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 869.235517] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 869.280411] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52414a12-8170-9ce4-f2cc-013438a681ef, 'name': SearchDatastore_Task, 'duration_secs': 0.021004} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.281472] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-902cbf1f-66b7-46bf-8f13-03bc115e336e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.286473] env[69982]: DEBUG oslo_concurrency.lockutils [req-64b9f340-0198-4367-a720-3f3a7f003d21 req-449e5000-efff-4801-92ec-3fefce529cd7 service nova] Releasing lock "refresh_cache-570675a8-3ec0-4fe6-b123-d3901d56b8cf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.288783] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 869.288783] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c2030e-2e8c-2722-10d6-feaa917952d6" [ 869.288783] env[69982]: _type = "Task" [ 869.288783] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.290458] env[69982]: DEBUG nova.policy [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 869.303098] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c2030e-2e8c-2722-10d6-feaa917952d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.421466] env[69982]: INFO nova.compute.manager [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 48.09 seconds to build instance. [ 869.597662] env[69982]: DEBUG nova.compute.utils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 869.740771] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 869.755564] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Successfully created port: 3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 869.810796] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c2030e-2e8c-2722-10d6-feaa917952d6, 'name': SearchDatastore_Task, 'duration_secs': 0.029771} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.811111] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.811451] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 570675a8-3ec0-4fe6-b123-d3901d56b8cf/570675a8-3ec0-4fe6-b123-d3901d56b8cf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.811761] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e268edf-731f-43fa-81ba-c60a57a08bad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.820724] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 869.820724] env[69982]: value = "task-3864704" [ 869.820724] env[69982]: _type = "Task" [ 869.820724] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.838711] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864704, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.924299] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa606902-71af-49eb-b11a-c0a8f33a8f15 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.802s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.103053] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.277493] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401929e5-26c3-4893-a475-8bc0711d7eee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.286938] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d07f68ab-2d5a-4e1a-a83a-fbdfe63bdaeb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.319686] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a921f25-9843-4fd1-ad95-eb1d748acc49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.331656] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e465a8-78f4-45bc-aa3c-0027aad99066 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.346980] env[69982]: DEBUG nova.compute.provider_tree [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.350720] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864704, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.429021] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.751064] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 870.776213] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.777017] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.777017] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.777260] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.777530] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.778470] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.778470] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.778470] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.778470] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] 
Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.778807] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.778807] env[69982]: DEBUG nova.virt.hardware [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.780249] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06aaa04-3a62-4080-a3be-ea52de990483 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.789480] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f462a9d6-8546-464d-897f-5e638aebb6c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.836703] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.886782} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.837082] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 570675a8-3ec0-4fe6-b123-d3901d56b8cf/570675a8-3ec0-4fe6-b123-d3901d56b8cf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 870.837336] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 870.837635] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9edba20-acb1-4eee-9d84-dc9ce6249f8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.846478] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 870.846478] env[69982]: value = "task-3864705" [ 870.846478] env[69982]: _type = "Task" [ 870.846478] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.852116] env[69982]: DEBUG nova.scheduler.client.report [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 870.860662] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864705, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.956662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.187866] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.188194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.188455] env[69982]: INFO nova.compute.manager [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Attaching volume 220a57d7-008a-4c0b-bb6b-1039e56ad8c6 to /dev/sdb [ 871.233728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f67b93-23d3-475e-844a-1dc4df6b8359 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.244130] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d6e9cf-c52f-4c84-977b-329fbbf491f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.263692] env[69982]: DEBUG nova.virt.block_device [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 
tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating existing volume attachment record: 38ba0064-42bc-4403-9f6d-ce5e54beb5e8 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 871.364325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.365064] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 871.368860] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864705, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106035} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.369296] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.711s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.370093] env[69982]: DEBUG nova.objects.instance [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lazy-loading 'resources' on Instance uuid 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 871.372426] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.373931] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70dabbc0-dd63-4aa7-a10a-2dfa6c8d23dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.419891] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 570675a8-3ec0-4fe6-b123-d3901d56b8cf/570675a8-3ec0-4fe6-b123-d3901d56b8cf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.422521] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e898d215-fae4-4358-ae48-6630370af793 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.445661] env[69982]: INFO nova.compute.manager [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Unrescuing [ 871.445994] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.446207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.446461] env[69982]: DEBUG nova.network.neutron [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 871.453317] env[69982]: DEBUG nova.compute.manager [req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Received event network-vif-plugged-3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.453317] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] Acquiring lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.453317] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.453317] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.453317] env[69982]: DEBUG nova.compute.manager [req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] No waiting events found dispatching network-vif-plugged-3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.453508] env[69982]: WARNING nova.compute.manager 
[req-ba0ce214-3d5e-41a0-a853-1ceb03790773 req-67b618f4-5048-4a87-af88-20d2c6f5d925 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Received unexpected event network-vif-plugged-3719920b-0825-4574-9add-6fc870171069 for instance with vm_state building and task_state spawning. [ 871.462275] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 871.462275] env[69982]: value = "task-3864707" [ 871.462275] env[69982]: _type = "Task" [ 871.462275] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.471259] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864707, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.503565] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Successfully updated port: 3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.879510] env[69982]: DEBUG nova.compute.utils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 871.881415] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 871.881630] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 871.952280] env[69982]: DEBUG nova.policy [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '114e89c3714d4ce39f4947a0aa567aba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5ba58648e534c55953c89a4eae7caf4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.974300] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864707, 'name': ReconfigVM_Task, 'duration_secs': 0.413937} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.974587] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 570675a8-3ec0-4fe6-b123-d3901d56b8cf/570675a8-3ec0-4fe6-b123-d3901d56b8cf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 871.975328] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ce16219-5867-489f-b3c5-a09b2aafbe04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.983768] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 871.983768] env[69982]: value = "task-3864710" [ 871.983768] env[69982]: _type = "Task" [ 871.983768] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.995119] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864710, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.013572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.013783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.013999] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 872.304220] env[69982]: DEBUG nova.network.neutron [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updating instance_info_cache with network_info: [{"id": "b5a29604-4742-41d4-b219-8bbd59089c94", "address": "fa:16:3e:5d:73:4d", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5a29604-47", "ovs_interfaceid": "b5a29604-4742-41d4-b219-8bbd59089c94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.387156] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 872.394301] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Successfully created port: e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 872.468157] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2175144a-62bf-42ab-bd1a-003f45cf24f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.476940] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2198944-635a-4929-bd5e-b4c45392e273 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.514515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8515363d-96c5-47f5-ac8b-f9c2ee04f0d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.527639] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b08d3d-b105-469c-8ab1-5904b47cdf46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.531709] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864710, 'name': Rename_Task, 'duration_secs': 0.219072} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.532040] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.532650] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6333c8f3-db4b-4de9-b29d-d55d1de17150 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.543155] env[69982]: DEBUG nova.compute.provider_tree [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.546743] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 872.546743] env[69982]: value = "task-3864711" [ 872.546743] env[69982]: _type = "Task" [ 872.546743] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.551687] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 872.561424] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864711, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.724651] env[69982]: DEBUG nova.network.neutron [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Updating instance_info_cache with network_info: [{"id": "3719920b-0825-4574-9add-6fc870171069", "address": "fa:16:3e:f7:9b:63", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3719920b-08", "ovs_interfaceid": "3719920b-0825-4574-9add-6fc870171069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.807129] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-cd839916-6daf-4b31-941d-6305a585bfaa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.807822] env[69982]: DEBUG nova.objects.instance [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'flavor' on Instance uuid cd839916-6daf-4b31-941d-6305a585bfaa {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 873.048973] env[69982]: DEBUG nova.scheduler.client.report [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.064261] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864711, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.228617] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.228617] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Instance network_info: |[{"id": "3719920b-0825-4574-9add-6fc870171069", "address": "fa:16:3e:f7:9b:63", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3719920b-08", "ovs_interfaceid": "3719920b-0825-4574-9add-6fc870171069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 873.228986] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:9b:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3719920b-0825-4574-9add-6fc870171069', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.237599] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 873.237599] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 873.237599] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44e7b85c-872a-4f11-bdf0-81b9ba60453c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.260035] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.260035] env[69982]: value = "task-3864712" [ 873.260035] env[69982]: _type = "Task" [ 873.260035] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.269928] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864712, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.315086] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6318b11-9fbc-4a91-9f96-f1b3ec5e5e2a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.342038] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.342038] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2290f592-d287-4a0e-a9a2-488feea24a69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.351871] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 873.351871] env[69982]: value = "task-3864713" [ 873.351871] env[69982]: _type = "Task" [ 873.351871] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.363157] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864713, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.397525] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 873.425840] env[69982]: DEBUG nova.compute.manager [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Received event network-changed-3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 873.427850] env[69982]: DEBUG nova.compute.manager [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Refreshing instance network info cache due to event network-changed-3719920b-0825-4574-9add-6fc870171069. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 873.427850] env[69982]: DEBUG oslo_concurrency.lockutils [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] Acquiring lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.427850] env[69982]: DEBUG oslo_concurrency.lockutils [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] Acquired lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.427850] env[69982]: DEBUG nova.network.neutron [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Refreshing network info cache for port 3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 873.431615] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 873.432038] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 873.432091] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 873.432263] env[69982]: DEBUG 
nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 873.432830] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 873.432830] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 873.432830] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 873.432990] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 873.433122] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 873.433304] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 873.433486] env[69982]: DEBUG nova.virt.hardware [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 873.434785] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be306089-9e85-4ec2-9b1e-d5e1c4c54942 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.446559] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b5e99f-98d4-4429-8d9e-eeef51b96533 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.558751] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.561586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.842s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.563204] env[69982]: INFO nova.compute.claims [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.573126] env[69982]: DEBUG oslo_vmware.api [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864711, 'name': PowerOnVM_Task, 'duration_secs': 0.816747} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.573441] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.573661] env[69982]: INFO nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Took 8.02 seconds to spawn the instance on the hypervisor. [ 873.573902] env[69982]: DEBUG nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.578045] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d095639b-3e23-4af2-a733-5122d6491f63 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.586519] env[69982]: INFO nova.scheduler.client.report [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Deleted allocations for instance 1bdb1577-cc35-4839-8992-ae3b4ab87eb2 [ 873.771917] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864712, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.863262] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864713, 'name': PowerOffVM_Task, 'duration_secs': 0.297537} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.863583] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 873.869013] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 873.869364] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-667aad1d-f02e-4f54-a674-13af6996a116 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.891566] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 873.891566] env[69982]: value = "task-3864715" [ 873.891566] env[69982]: _type = "Task" [ 873.891566] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.901729] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864715, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.021083] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Successfully updated port: e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 874.101247] env[69982]: INFO nova.compute.manager [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Took 47.01 seconds to build instance. [ 874.103342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-094f8c86-26ce-4760-9819-18838ccb28b5 tempest-ServersTestJSON-1811308298 tempest-ServersTestJSON-1811308298-project-member] Lock "1bdb1577-cc35-4839-8992-ae3b4ab87eb2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.313s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.273356] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864712, 'name': CreateVM_Task, 'duration_secs': 0.517614} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.273621] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.274390] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.274554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.274900] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.275191] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592b92e3-51ce-4186-b4e3-4904fadf3d80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.278176] env[69982]: DEBUG nova.network.neutron [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Updated VIF entry in instance network info cache for port 3719920b-0825-4574-9add-6fc870171069. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 874.279415] env[69982]: DEBUG nova.network.neutron [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Updating instance_info_cache with network_info: [{"id": "3719920b-0825-4574-9add-6fc870171069", "address": "fa:16:3e:f7:9b:63", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3719920b-08", "ovs_interfaceid": "3719920b-0825-4574-9add-6fc870171069", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.283874] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 874.283874] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca1b1a-b616-fe94-0175-b95adb6a02ac" [ 874.283874] env[69982]: _type = "Task" [ 874.283874] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.293142] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca1b1a-b616-fe94-0175-b95adb6a02ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.404055] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864715, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.523855] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.523855] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.523855] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 874.611612] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0dc09062-15c8-476f-8348-9f9e45137169 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.125s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.781756] env[69982]: DEBUG oslo_concurrency.lockutils [req-af50c988-1cb0-4fcd-8976-16b2ba9da15a req-0b619b41-82a2-4da2-a8b9-8580503843d6 service nova] Releasing lock "refresh_cache-3f896859-5a4a-4a59-bee8-b116e291fbe7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.795754] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca1b1a-b616-fe94-0175-b95adb6a02ac, 'name': SearchDatastore_Task, 'duration_secs': 0.039666} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.799746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.799746] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.800269] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.800359] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.800518] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.801185] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8156db6-7bfd-4c80-aeec-755beece7944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.812215] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.813034] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.817037] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-684fa4f9-96d8-4927-99d0-5b8587ec9595 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.823620] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 874.823620] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529337c0-60a2-dbe1-e973-4e79f11a832d" [ 874.823620] env[69982]: _type = "Task" [ 874.823620] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.836253] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529337c0-60a2-dbe1-e973-4e79f11a832d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.909495] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864715, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.072808] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c586a870-fb70-4fac-b1ee-a8b64d3f12be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.078786] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 875.086654] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d323889a-63cb-4943-a92f-9d1da39ef4dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.119408] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 875.125128] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27645dd4-506e-4086-943a-b144b2a1f9a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.133878] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3137df0a-75fd-45cd-9062-cc926ffb4b01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.151249] env[69982]: DEBUG nova.compute.provider_tree [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.268020] env[69982]: DEBUG nova.network.neutron [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Updating instance_info_cache with network_info: [{"id": "e31f87a2-d500-40b5-a928-2217787b6c1a", "address": "fa:16:3e:d4:6e:34", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31f87a2-d5", "ovs_interfaceid": "e31f87a2-d500-40b5-a928-2217787b6c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.334533] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529337c0-60a2-dbe1-e973-4e79f11a832d, 'name': SearchDatastore_Task, 'duration_secs': 0.021477} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.335389] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8620673f-5885-41ae-973f-b1805903b9ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.341272] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 875.341272] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524e1d89-d57e-53d0-4eb2-1010873c883c" [ 875.341272] env[69982]: _type = "Task" [ 875.341272] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.351199] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524e1d89-d57e-53d0-4eb2-1010873c883c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.405178] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864715, 'name': ReconfigVM_Task, 'duration_secs': 1.427825} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.405477] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 875.405722] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.405992] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37fdbabd-f0bb-4e0f-bbc6-d721ba020786 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.414728] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 875.414728] env[69982]: value = "task-3864716" [ 875.414728] env[69982]: _type = "Task" [ 875.414728] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.428677] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864716, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.483814] env[69982]: DEBUG nova.compute.manager [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Received event network-vif-plugged-e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.484055] env[69982]: DEBUG oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Acquiring lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.485579] env[69982]: DEBUG oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.485726] env[69982]: DEBUG oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.485925] env[69982]: DEBUG nova.compute.manager [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] No waiting events found dispatching network-vif-plugged-e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 875.489026] env[69982]: WARNING nova.compute.manager [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Received unexpected event network-vif-plugged-e31f87a2-d500-40b5-a928-2217787b6c1a for instance with vm_state building and task_state spawning. [ 875.489026] env[69982]: DEBUG nova.compute.manager [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Received event network-changed-e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.489026] env[69982]: DEBUG nova.compute.manager [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Refreshing instance network info cache due to event network-changed-e31f87a2-d500-40b5-a928-2217787b6c1a. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 875.489026] env[69982]: DEBUG oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Acquiring lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.650962] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.654938] env[69982]: DEBUG nova.scheduler.client.report [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.772499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.773087] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Instance network_info: |[{"id": "e31f87a2-d500-40b5-a928-2217787b6c1a", "address": "fa:16:3e:d4:6e:34", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31f87a2-d5", "ovs_interfaceid": "e31f87a2-d500-40b5-a928-2217787b6c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 875.773468] env[69982]: DEBUG 
oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Acquired lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.773711] env[69982]: DEBUG nova.network.neutron [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Refreshing network info cache for port e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 875.775450] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:6e:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e31f87a2-d500-40b5-a928-2217787b6c1a', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.787392] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.791784] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.792917] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16dc5a26-1429-46c4-aab5-cc5785e4619a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.817937] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.817937] env[69982]: value = "task-3864717" [ 875.817937] env[69982]: _type = "Task" [ 875.817937] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.826705] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 875.827075] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767966', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'name': 'volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a1de08e-3206-44cc-8d34-a5527faf9684', 'attached_at': '', 'detached_at': '', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'serial': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 875.828640] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f6997d-e16f-462c-ab45-510cd6d431c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.836986] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864717, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.860085] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393c6d5b-1bd0-4573-a122-6eff9ceada74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.867472] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524e1d89-d57e-53d0-4eb2-1010873c883c, 'name': SearchDatastore_Task, 'duration_secs': 0.031484} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.868285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.868601] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3f896859-5a4a-4a59-bee8-b116e291fbe7/3f896859-5a4a-4a59-bee8-b116e291fbe7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.869558] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb99c735-2294-4895-86f2-f270e0a9bb11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.894086] env[69982]: DEBUG nova.compute.manager [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 875.900220] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6/volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.902155] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb2822b-1bd8-414a-9ea8-faa362886c7a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.905147] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af595f2c-6812-49ef-be1d-a248ef136b1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.927543] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 875.927543] env[69982]: value = "task-3864718" [ 875.927543] env[69982]: _type = "Task" [ 875.927543] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.944436] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 875.944436] env[69982]: value = "task-3864719" [ 875.944436] env[69982]: _type = "Task" [ 875.944436] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.955924] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.956342] env[69982]: DEBUG oslo_vmware.api [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864716, 'name': PowerOnVM_Task, 'duration_secs': 0.467197} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.960335] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.960710] env[69982]: DEBUG nova.compute.manager [None req-c4997b07-8258-4692-a2db-71ae9a468368 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 875.961790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6062fdf-28d1-4773-a0b9-1bd324104c2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.970445] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864719, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.102770] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 876.104661] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861e22b7-586c-4087-9600-63fe29c14165 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.112213] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk is in state: ready. 
{{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 876.112323] env[69982]: ERROR oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk due to incomplete transfer. [ 876.112628] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9a781190-c489-433d-b10b-38ee9242b8de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.122253] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5245f8d3-f329-cf45-99ce-eed2e206d1cc/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 876.122605] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Uploaded image f06c75b9-f2af-4fdf-8aa4-851297d650bd to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 876.124980] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 876.125344] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f1ad6205-4f1d-4850-a8d0-2dd28f24c221 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.134517] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 876.134517] env[69982]: value = "task-3864720" [ 876.134517] env[69982]: _type = "Task" [ 876.134517] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.148207] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864720, 'name': Destroy_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.161766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.163074] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 876.165795] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.492s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.168222] env[69982]: INFO nova.compute.claims [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.333896] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864717, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.407287] env[69982]: DEBUG nova.network.neutron [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Updated VIF entry in instance network info cache for port e31f87a2-d500-40b5-a928-2217787b6c1a. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 876.407809] env[69982]: DEBUG nova.network.neutron [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Updating instance_info_cache with network_info: [{"id": "e31f87a2-d500-40b5-a928-2217787b6c1a", "address": "fa:16:3e:d4:6e:34", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape31f87a2-d5", "ovs_interfaceid": "e31f87a2-d500-40b5-a928-2217787b6c1a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.444174] env[69982]: INFO nova.compute.manager [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] instance snapshotting [ 876.449034] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864718, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.454975] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7447b761-187e-4e54-a8ab-445bc1c4619a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.466609] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864719, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.485539] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0df0c22-0255-45af-98b5-b53ea057e32c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.645858] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864720, 'name': Destroy_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.676709] env[69982]: DEBUG nova.compute.utils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.687607] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.687813] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.802147] env[69982]: DEBUG nova.policy [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8dadbf580df4460f932e73c644aa6ef9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cb83f8a9d3af402384d53028ece0570a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.830171] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864717, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.912105] env[69982]: DEBUG oslo_concurrency.lockutils [req-c6d5e9b6-5ddf-470c-ada8-864c22592358 req-76e05a63-0682-4138-972e-0824d49062e1 service nova] Releasing lock "refresh_cache-48dbc665-8286-4d5d-af4e-1a85d1742952" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.943748] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572214} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.943916] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3f896859-5a4a-4a59-bee8-b116e291fbe7/3f896859-5a4a-4a59-bee8-b116e291fbe7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 876.945932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 876.945932] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ae2e312-e9cb-4ddc-acee-bb6a76b863a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.959433] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 876.959433] env[69982]: value = "task-3864721" [ 876.959433] env[69982]: _type = "Task" [ 876.959433] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.970867] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864719, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.976341] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.998498] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 876.999099] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-734b80f7-8385-44af-9f23-9733f598eb5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.007690] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 877.007690] env[69982]: value = "task-3864722" [ 877.007690] env[69982]: _type = "Task" [ 877.007690] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.016587] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864722, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.146746] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864720, 'name': Destroy_Task, 'duration_secs': 0.803575} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.147216] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Destroyed the VM [ 877.147381] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 877.147647] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-54f66acd-4ea7-4a00-941c-ef08f494ee07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.156922] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 877.156922] env[69982]: value = "task-3864723" [ 877.156922] env[69982]: _type = "Task" [ 877.156922] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.168922] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864723, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.195172] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 877.330242] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864717, 'name': CreateVM_Task, 'duration_secs': 1.462599} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.330242] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.331974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.332169] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.332363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 877.332637] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e905be-879f-402a-9066-343c257bdb20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.345877] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 877.345877] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f85aa0-3a36-e1e0-f17a-7d76927d8337" [ 877.345877] env[69982]: _type = "Task" [ 877.345877] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.354514] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f85aa0-3a36-e1e0-f17a-7d76927d8337, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.438220] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Successfully created port: 45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.474877] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864719, 'name': ReconfigVM_Task, 'duration_secs': 1.528582} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.475188] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6/volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 877.488498] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a35fa5c9-c811-4269-8f31-5f6fbee483ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.496530] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074797} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.499497] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.501271] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bfb4ba-4026-44af-9e4e-a9f1d1cf22fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.506121] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 877.506121] env[69982]: value = "task-3864724" [ 877.506121] env[69982]: _type = "Task" [ 877.506121] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.529707] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 3f896859-5a4a-4a59-bee8-b116e291fbe7/3f896859-5a4a-4a59-bee8-b116e291fbe7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.534300] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8037d1c-9d56-44ef-bcbb-3b3fdd034694 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.556628] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864724, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.563622] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864722, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.565383] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 877.565383] env[69982]: value = "task-3864725" [ 877.565383] env[69982]: _type = "Task" [ 877.565383] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.578973] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.673301] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864723, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.858390] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f85aa0-3a36-e1e0-f17a-7d76927d8337, 'name': SearchDatastore_Task, 'duration_secs': 0.013793} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.858390] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.858390] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.858672] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.858829] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.859025] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.861921] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98410903-79df-48e6-9a39-cc2a23f5c857 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.872831] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.873144] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.874274] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71a705a2-8325-4d53-9de4-688fdea5a93e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.882233] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 877.882233] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217fbd1-e1f0-23fa-4be0-b340b0fd7bc7" [ 877.882233] env[69982]: _type = "Task" [ 877.882233] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.889766] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f621b00f-f5bd-43a8-89ed-3cd01db1fa0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.904742] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217fbd1-e1f0-23fa-4be0-b340b0fd7bc7, 'name': SearchDatastore_Task, 'duration_secs': 0.010975} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.908487] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5efe5e65-36b0-4075-93f1-ff31f75ff42e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.913293] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaeed1fa-2832-4d1c-8288-00e931c1152e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.922206] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 877.922206] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52181fcf-7acd-494a-27c7-cea22a0bb5a7" [ 877.922206] env[69982]: _type = "Task" [ 877.922206] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.958012] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c3c91b-9efa-4e91-b2e7-4e4f8302af3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.969805] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52181fcf-7acd-494a-27c7-cea22a0bb5a7, 'name': SearchDatastore_Task, 'duration_secs': 0.022212} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.970243] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.970515] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 48dbc665-8286-4d5d-af4e-1a85d1742952/48dbc665-8286-4d5d-af4e-1a85d1742952.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.971909] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fdddc0-7475-44dd-8a5f-4d371c9cb940 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.975971] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9303d539-0ae6-426f-985f-4f705be2ec92 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.991326] env[69982]: DEBUG nova.compute.provider_tree [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.033464] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 878.033464] env[69982]: value = "task-3864726" [ 878.033464] env[69982]: _type = "Task" [ 878.033464] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.033464] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.033464] env[69982]: DEBUG oslo_vmware.api [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864724, 'name': ReconfigVM_Task, 'duration_secs': 0.179231} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.033931] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767966', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'name': 'volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a1de08e-3206-44cc-8d34-a5527faf9684', 'attached_at': '', 'detached_at': '', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'serial': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 878.034597] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864722, 'name': CreateSnapshot_Task, 'duration_secs': 0.649731} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.034977] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 878.036947] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc32756-a33a-4032-8bad-1d1a94652e14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.080425] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864725, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.169305] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864723, 'name': RemoveSnapshot_Task, 'duration_secs': 0.531489} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.169305] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 878.172162] env[69982]: DEBUG nova.compute.manager [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.172162] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94648be8-50bd-4c00-b7ea-d9420a9c86ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.211800] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 878.256160] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.257249] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.257249] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.257249] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.257249] env[69982]: DEBUG nova.virt.hardware [None 
req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.257350] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.257558] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.257785] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.257983] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.258204] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.258451] env[69982]: DEBUG nova.virt.hardware [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.259810] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af96bc77-b2e7-4f68-b704-d283edcbc0a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.272275] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba80104-363d-459a-bda6-ebb5347d7cc7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.495419] env[69982]: DEBUG nova.scheduler.client.report [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.510795] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864726, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.558943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 878.558943] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5a8d7605-9094-4e00-8965-e08fedb7e9d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.569211] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 878.569211] env[69982]: value = "task-3864727" [ 878.569211] env[69982]: _type = "Task" [ 878.569211] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.586208] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864725, 'name': ReconfigVM_Task, 'duration_secs': 0.549914} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.590339] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 3f896859-5a4a-4a59-bee8-b116e291fbe7/3f896859-5a4a-4a59-bee8-b116e291fbe7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.591396] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864727, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.591626] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37c5f3da-7771-4d2a-851a-7ef80f5da7bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.604345] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 878.604345] env[69982]: value = "task-3864728" [ 878.604345] env[69982]: _type = "Task" [ 878.604345] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.615470] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864728, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.687953] env[69982]: INFO nova.compute.manager [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Shelve offloading [ 878.950365] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "7af5a14d-f586-4746-9831-8be255581637" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.950863] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.951192] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "7af5a14d-f586-4746-9831-8be255581637-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.952261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.952261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.955932] env[69982]: INFO nova.compute.manager [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Terminating instance [ 879.001737] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.836s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.002343] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 879.005298] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.397s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.005896] env[69982]: DEBUG nova.objects.instance [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 879.022562] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545467} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.023220] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 48dbc665-8286-4d5d-af4e-1a85d1742952/48dbc665-8286-4d5d-af4e-1a85d1742952.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.023532] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.023941] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba10624a-89ae-488f-889c-0afb4849e9e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.032463] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 879.032463] env[69982]: value = "task-3864729" [ 879.032463] env[69982]: _type = "Task" [ 879.032463] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.044953] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.079443] env[69982]: DEBUG nova.objects.instance [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lazy-loading 'flavor' on Instance uuid 9a1de08e-3206-44cc-8d34-a5527faf9684 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.093531] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864727, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.125284] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864728, 'name': Rename_Task, 'duration_secs': 0.302759} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.125617] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.125867] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64c54436-d8c5-4866-b33a-b294949e449f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.135712] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 879.135712] env[69982]: value = "task-3864730" [ 879.135712] env[69982]: _type = "Task" [ 879.135712] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.146408] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864730, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.193298] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.193950] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-144f6b5f-87f4-4d87-8f8a-9dd0f2f55b1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.201822] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 879.201822] env[69982]: value = "task-3864731" [ 879.201822] env[69982]: _type = "Task" [ 879.201822] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.213247] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 879.213523] env[69982]: DEBUG nova.compute.manager [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.214753] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3439db1e-b50f-4e7e-9c0f-38e80fa55194 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.223726] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.223904] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.224101] env[69982]: DEBUG nova.network.neutron [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 879.465099] env[69982]: DEBUG nova.compute.manager [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Start destroying the 
instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.465392] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.466292] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d62594-bde7-4a65-873b-cf5c1e911613 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.476167] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.476455] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f877308-b334-4ae6-bc3d-c63c5060bcf2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.484148] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 879.484148] env[69982]: value = "task-3864732" [ 879.484148] env[69982]: _type = "Task" [ 879.484148] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.498776] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.519876] env[69982]: DEBUG nova.compute.utils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.522572] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 879.524643] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.546151] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077541} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.546500] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.548110] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dff1dc30-461c-45f6-af30-8ae414af5517 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.575604] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 48dbc665-8286-4d5d-af4e-1a85d1742952/48dbc665-8286-4d5d-af4e-1a85d1742952.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.576173] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f6a36bb-3126-4b2d-9788-d0e1750ed2ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.597503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-209b82af-44a3-4008-a562-97281c60ac17 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.409s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.611923] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864727, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.613529] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 879.613529] env[69982]: value = "task-3864733" [ 879.613529] env[69982]: _type = "Task" [ 879.613529] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.623492] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864733, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.631430] env[69982]: DEBUG nova.policy [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99858c5fbda7454cab0188cf368e51f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83b53a0998874810b5302415624592cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.647215] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864730, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.743462] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Successfully updated port: 45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.843146] env[69982]: DEBUG nova.compute.manager [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Received event network-vif-plugged-45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 879.843146] env[69982]: DEBUG oslo_concurrency.lockutils [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] Acquiring lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.843146] env[69982]: DEBUG oslo_concurrency.lockutils [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.843146] env[69982]: DEBUG oslo_concurrency.lockutils [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.843146] env[69982]: DEBUG nova.compute.manager [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] No waiting events found dispatching network-vif-plugged-45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 879.843405] env[69982]: 
WARNING nova.compute.manager [req-80031150-9dc1-4aef-94fb-bf9f4a1e4c4b req-eb63dd9c-3b78-45b4-9468-5b8313b45241 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Received unexpected event network-vif-plugged-45ff6687-e818-4364-9511-d15b0c637c11 for instance with vm_state building and task_state spawning. [ 879.996164] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864732, 'name': PowerOffVM_Task, 'duration_secs': 0.394652} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.996502] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.996663] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.997010] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85272575-4b89-4e5e-825f-2eacb44f4aa7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.018718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e47cb6f5-3c63-4970-b2d7-983e64f2d35e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.020163] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.527s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.023049] env[69982]: INFO nova.compute.claims [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.025758] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 880.072597] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.072816] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.072997] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Deleting the datastore file [datastore1] 7af5a14d-f586-4746-9831-8be255581637 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.073361] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc2756cb-c645-4038-bd4d-0166c9d02f88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.082164] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for the task: (returnval){ [ 880.082164] env[69982]: value = "task-3864735" [ 880.082164] env[69982]: _type = "Task" [ 880.082164] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.093009] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864735, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.109189] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864727, 'name': CloneVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.124437] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864733, 'name': ReconfigVM_Task, 'duration_secs': 0.37183} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.124766] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 48dbc665-8286-4d5d-af4e-1a85d1742952/48dbc665-8286-4d5d-af4e-1a85d1742952.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.125481] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0d7af57-ec7a-47b8-b542-e7779b18117d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.132878] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 880.132878] env[69982]: value = "task-3864736" [ 880.132878] env[69982]: _type = "Task" [ 880.132878] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.143739] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864736, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.150486] env[69982]: DEBUG oslo_vmware.api [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864730, 'name': PowerOnVM_Task, 'duration_secs': 0.754862} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.150805] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.151066] env[69982]: INFO nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 880.151206] env[69982]: DEBUG nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.152049] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2fbd912-6154-4c10-916d-c20efb2ba2fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.247503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.247503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquired lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.247841] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 880.557166] env[69982]: DEBUG nova.network.neutron [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updating instance_info_cache with network_info: [{"id": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "address": "fa:16:3e:74:2d:e4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ecce80-43", "ovs_interfaceid": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.593164] env[69982]: DEBUG oslo_vmware.api [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Task: {'id': task-3864735, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.162466} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.593453] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.593639] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.593818] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.594047] env[69982]: INFO nova.compute.manager [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] [instance: 7af5a14d-f586-4746-9831-8be255581637] Took 1.13 seconds to destroy the instance on the hypervisor. [ 880.594666] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.594742] env[69982]: DEBUG nova.compute.manager [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.594838] env[69982]: DEBUG nova.network.neutron [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 880.607226] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864727, 'name': CloneVM_Task, 'duration_secs': 1.62558} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.607415] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Created linked-clone VM from snapshot [ 880.608215] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9694401-731f-4b3e-91c2-f7089f25395f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.610894] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.611136] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.618649] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Uploading image d4c2f593-e18f-4b50-b392-250a022eb1f3 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 880.643071] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864736, 'name': Rename_Task, 'duration_secs': 0.296048} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.643292] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.643542] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65d48525-1bdf-4e33-885f-ff5d68c3547a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.647520] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 880.647520] env[69982]: value = "vm-767970" [ 880.647520] env[69982]: _type = "VirtualMachine" [ 880.647520] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 880.647771] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5c5656b5-3732-40f4-be6b-f2f22c2c390a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.650859] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 880.650859] env[69982]: value = "task-3864737" [ 880.650859] env[69982]: _type = "Task" [ 880.650859] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.659538] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease: (returnval){ [ 880.659538] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52880297-94ae-a657-89f4-1b8aace36ded" [ 880.659538] env[69982]: _type = "HttpNfcLease" [ 880.659538] env[69982]: } obtained for exporting VM: (result){ [ 880.659538] env[69982]: value = "vm-767970" [ 880.659538] env[69982]: _type = "VirtualMachine" [ 880.659538] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 880.659842] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the lease: (returnval){ [ 880.659842] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52880297-94ae-a657-89f4-1b8aace36ded" [ 880.659842] env[69982]: _type = "HttpNfcLease" [ 880.659842] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 880.664957] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864737, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.681476] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 880.681476] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52880297-94ae-a657-89f4-1b8aace36ded" [ 880.681476] env[69982]: _type = "HttpNfcLease" [ 880.681476] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 880.681476] env[69982]: INFO nova.compute.manager [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Took 50.63 seconds to build instance. 
[ 880.746915] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Successfully created port: f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.823616] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 881.022729] env[69982]: DEBUG nova.network.neutron [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Updating instance_info_cache with network_info: [{"id": "45ff6687-e818-4364-9511-d15b0c637c11", "address": "fa:16:3e:c4:24:b9", "network": {"id": "d3d928b0-ebd4-4035-99d1-28c28a7179b7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684818615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb83f8a9d3af402384d53028ece0570a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45ff6687-e8", "ovs_interfaceid": "45ff6687-e818-4364-9511-d15b0c637c11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.036401] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 881.070588] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.082467] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 881.082650] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.082677] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.082855] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.083014] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.083166] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 881.083379] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 881.083585] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 881.083818] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 881.084025] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 881.084211] env[69982]: DEBUG nova.virt.hardware [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 881.085871] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0df9832-20b7-4587-a0c7-6bafc6e655f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.101211] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2becd9ec-e650-42bf-9400-5350555fed23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.120077] env[69982]: INFO nova.compute.manager [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Detaching volume 220a57d7-008a-4c0b-bb6b-1039e56ad8c6 [ 881.167351] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864737, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.178039] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 881.178039] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52880297-94ae-a657-89f4-1b8aace36ded" [ 881.178039] env[69982]: _type = "HttpNfcLease" [ 881.178039] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 881.178853] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 881.178853] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52880297-94ae-a657-89f4-1b8aace36ded" [ 881.178853] env[69982]: _type = "HttpNfcLease" [ 881.178853] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 881.182274] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96bbd59-bd7a-46c3-9168-a656045485a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.184545] env[69982]: INFO nova.virt.block_device [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Attempting to driver detach volume 220a57d7-008a-4c0b-bb6b-1039e56ad8c6 from mountpoint /dev/sdb [ 881.184852] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 881.185197] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767966', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'name': 'volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a1de08e-3206-44cc-8d34-a5527faf9684', 'attached_at': '', 'detached_at': '', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'serial': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 881.186026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d900b2-c9f6-4735-9aea-6687afa50143 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.199047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b174c1b1-a305-4ae4-874e-cf56a9e06822 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.448s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.199405] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 881.199617] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk for reading. 
{{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 881.290873] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89deb68f-dbaa-4cd9-a974-3a2a88d9d47d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.299382] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7afaa5-242a-43be-9c90-3186fc35a9b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.328804] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfe6a3fe-25e9-4fc7-8621-45c82ac393bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.336282] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d75006ba-19ba-4f3d-b592-8d17c1ac30a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.349877] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] The volume has not been displaced from its original location: [datastore1] volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6/volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6.vmdk. No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 881.358347] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 881.362240] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49d19fbc-103f-4379-8c1f-f1be65f9755e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.384071] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 881.384071] env[69982]: value = "task-3864739" [ 881.384071] env[69982]: _type = "Task" [ 881.384071] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.407217] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864739, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.527654] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Releasing lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.528050] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Instance network_info: |[{"id": "45ff6687-e818-4364-9511-d15b0c637c11", "address": "fa:16:3e:c4:24:b9", "network": {"id": "d3d928b0-ebd4-4035-99d1-28c28a7179b7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684818615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb83f8a9d3af402384d53028ece0570a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45ff6687-e8", "ovs_interfaceid": "45ff6687-e818-4364-9511-d15b0c637c11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 881.528654] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:24:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61b8f0db-488e-42d7-bf6c-6c1665cd5616', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45ff6687-e818-4364-9511-d15b0c637c11', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.537195] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Creating folder: Project (cb83f8a9d3af402384d53028ece0570a). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.537529] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d02774b2-fc48-493a-927b-a9a95b51b63c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.549590] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Created folder: Project (cb83f8a9d3af402384d53028ece0570a) in parent group-v767796. [ 881.550328] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Creating folder: Instances. Parent ref: group-v767971. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.550328] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f40dec9-35c3-4e96-a98e-cdea32febaa8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.563264] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Created folder: Instances in parent group-v767971. [ 881.563543] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.563986] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.564769] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b97762a3-05f1-44d1-9a8b-aa6ce0df3073 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.580906] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 881.584703] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987533cb-94ff-4930-bdd9-7f708d185416 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.594226] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.595198] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-895802e8-4269-4753-b2eb-49459865d06a {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.597214] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.597214] env[69982]: value = "task-3864742" [ 881.597214] env[69982]: _type = "Task" [ 881.597214] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.612215] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864742, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.678577] env[69982]: DEBUG oslo_vmware.api [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864737, 'name': PowerOnVM_Task, 'duration_secs': 0.802334} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.683746] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.684225] env[69982]: INFO nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Took 8.29 seconds to spawn the instance on the hypervisor. [ 881.684679] env[69982]: DEBUG nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 881.685074] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.685334] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.685619] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore2] 881cbfae-7630-45e0-a8ad-b2cd283689ea {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.686805] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4122e22e-615a-498b-ac0e-c6878b44ee14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.689885] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88b5b330-ed0d-40eb-91fc-02c73591e501 {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.704177] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 881.704177] env[69982]: value = "task-3864744" [ 881.704177] env[69982]: _type = "Task" [ 881.704177] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.717021] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864744, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.872969] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d099ff-8b9f-4b53-ab67-98c20089f314 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.889560] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.890073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.896926] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2138ea15-de4a-42b4-9ef5-5546531b030f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.909055] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864739, 'name': ReconfigVM_Task, 'duration_secs': 0.397779} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.942913] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 881.954655] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e25acc2-d896-45a1-93ca-43995b794f50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.969715] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157f7692-3ede-4b4e-81ab-0c073db6b7e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.984186] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6d3876-6b25-4342-979a-faed725cb6c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.989115] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 881.989115] env[69982]: value = "task-3864745" [ 881.989115] env[69982]: _type = "Task" [ 881.989115] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.007723] env[69982]: DEBUG nova.compute.provider_tree [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.020260] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864745, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.111283] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864742, 'name': CreateVM_Task, 'duration_secs': 0.472339} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.111533] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 882.112453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.112803] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.113401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 882.113753] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4478261-7dfb-4aad-9c82-5ddcfa55a36b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.121404] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 882.121404] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52656ba6-8ce9-3644-6b57-f35bda1e2510" [ 882.121404] env[69982]: _type = "Task" [ 882.121404] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.134152] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52656ba6-8ce9-3644-6b57-f35bda1e2510, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.159874] env[69982]: DEBUG nova.compute.manager [req-bc7be15d-5888-44ed-a469-b210c1c01df5 req-e8b9b0f4-a340-429b-852b-0792b7d802d8 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Received event network-vif-deleted-add5b64d-ceb9-4750-8e48-49d509a81e5f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.160745] env[69982]: INFO nova.compute.manager [req-bc7be15d-5888-44ed-a469-b210c1c01df5 req-e8b9b0f4-a340-429b-852b-0792b7d802d8 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Neutron deleted interface add5b64d-ceb9-4750-8e48-49d509a81e5f; detaching it from the instance and deleting it from the info cache [ 882.160745] env[69982]: DEBUG nova.network.neutron [req-bc7be15d-5888-44ed-a469-b210c1c01df5 req-e8b9b0f4-a340-429b-852b-0792b7d802d8 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.173407] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Received event network-changed-45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 882.173767] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Refreshing instance network info cache due to event network-changed-45ff6687-e818-4364-9511-d15b0c637c11. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 882.174147] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Acquiring lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.174621] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Acquired lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.174621] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Refreshing network info cache for port 45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 882.224376] env[69982]: DEBUG oslo_vmware.api [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864744, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228811} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.225016] env[69982]: INFO nova.compute.manager [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Took 51.52 seconds to build instance. [ 882.226635] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.226837] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.227037] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.265082] env[69982]: INFO nova.scheduler.client.report [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance 881cbfae-7630-45e0-a8ad-b2cd283689ea [ 882.272107] env[69982]: DEBUG nova.network.neutron [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.404026] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.501981] env[69982]: DEBUG oslo_vmware.api [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864745, 'name': ReconfigVM_Task, 'duration_secs': 0.210584} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.502471] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-767966', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'name': 'volume-220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9a1de08e-3206-44cc-8d34-a5527faf9684', 'attached_at': '', 'detached_at': '', 'volume_id': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6', 'serial': '220a57d7-008a-4c0b-bb6b-1039e56ad8c6'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 882.512122] env[69982]: DEBUG nova.scheduler.client.report [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 882.637180] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52656ba6-8ce9-3644-6b57-f35bda1e2510, 'name': SearchDatastore_Task, 'duration_secs': 0.018552} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.637554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.637792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.638058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.638203] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.638383] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.638691] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c416da5-3c41-463f-a93d-a16cab6ecd43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.652099] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.652898] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.656016] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6cb21bf-3295-4b03-82c1-0d20ab3e43e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.676744] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec500e18-1b31-4916-b9f4-190a2d58bbd8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.682756] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 882.682756] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5257fcfc-91d3-ee3c-f8a0-3f459793cda3" [ 882.682756] env[69982]: _type = "Task" [ 882.682756] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.699497] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11010837-8d91-4902-93f1-99522a85a552 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.718316] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5257fcfc-91d3-ee3c-f8a0-3f459793cda3, 'name': SearchDatastore_Task, 'duration_secs': 0.015317} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.720486] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a117cb-0341-4f0c-99d4-4a20ed261103 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.729560] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 882.729560] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b53b72-90a9-990f-2fd9-b2238e2b8b76" [ 882.729560] env[69982]: _type = "Task" [ 882.729560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.735657] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b492c1d1-32fa-4bee-a7d7-b4de4244491c tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.154s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.757423] env[69982]: DEBUG nova.compute.manager [req-bc7be15d-5888-44ed-a469-b210c1c01df5 req-e8b9b0f4-a340-429b-852b-0792b7d802d8 service nova] [instance: 7af5a14d-f586-4746-9831-8be255581637] Detach interface failed, port_id=add5b64d-ceb9-4750-8e48-49d509a81e5f, reason: Instance 7af5a14d-f586-4746-9831-8be255581637 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 882.763402] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b53b72-90a9-990f-2fd9-b2238e2b8b76, 'name': SearchDatastore_Task, 'duration_secs': 0.018747} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.764030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.764222] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] b3aeaa98-724b-4563-aeaf-a089906eb0eb/b3aeaa98-724b-4563-aeaf-a089906eb0eb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.764582] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f500fb8e-a186-425e-929c-32d33d09c9e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.773733] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.774193] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 882.774193] env[69982]: value = "task-3864746" [ 882.774193] env[69982]: _type = "Task" [ 882.774193] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.775751] env[69982]: INFO nova.compute.manager [-] [instance: 7af5a14d-f586-4746-9831-8be255581637] Took 2.18 seconds to deallocate network for instance. [ 882.790975] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864746, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.933049] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.019367] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.999s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.020047] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 883.025386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 37.200s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.096590] env[69982]: DEBUG nova.objects.instance [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lazy-loading 'flavor' on Instance uuid 9a1de08e-3206-44cc-8d34-a5527faf9684 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.183367] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.183509] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.184681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.184681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 
tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.184681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.189277] env[69982]: INFO nova.compute.manager [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Terminating instance [ 883.248468] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Updated VIF entry in instance network info cache for port 45ff6687-e818-4364-9511-d15b0c637c11. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 883.249440] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Updating instance_info_cache with network_info: [{"id": "45ff6687-e818-4364-9511-d15b0c637c11", "address": "fa:16:3e:c4:24:b9", "network": {"id": "d3d928b0-ebd4-4035-99d1-28c28a7179b7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-684818615-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cb83f8a9d3af402384d53028ece0570a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61b8f0db-488e-42d7-bf6c-6c1665cd5616", "external-id": "nsx-vlan-transportzone-655", "segmentation_id": 655, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45ff6687-e8", "ovs_interfaceid": "45ff6687-e818-4364-9511-d15b0c637c11", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.296688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.297559] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 
tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864746, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.302391] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Successfully updated port: f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.528374] env[69982]: DEBUG nova.compute.utils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 883.529943] env[69982]: DEBUG nova.objects.instance [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lazy-loading 'migration_context' on Instance uuid 8b812422-4ca6-4d2b-b6af-873fdb21fab6 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.535442] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 883.535866] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 883.603255] env[69982]: DEBUG nova.policy [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64533b0ad8894d41bdf9fe921b440063', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '820dcd8333cb4a678ef562e4150518d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 883.693719] env[69982]: DEBUG nova.compute.manager [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.694100] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.695820] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e2eee2-43b9-4359-a1b5-f7b90601d739 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.707553] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.707553] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51d9c59c-4498-45b1-8a0f-7dff072496c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.716115] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 883.716115] env[69982]: value = "task-3864747" [ 883.716115] env[69982]: _type = "Task" [ 883.716115] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.727522] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864747, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.753227] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Releasing lock "refresh_cache-b3aeaa98-724b-4563-aeaf-a089906eb0eb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.753645] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received event network-vif-unplugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.753949] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.754308] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.754408] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.754728] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] No waiting events found dispatching network-vif-unplugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 883.754799] env[69982]: WARNING nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received unexpected event network-vif-unplugged-62ecce80-4354-4f66-a470-0b4ef8d663a8 for instance with vm_state shelved and task_state shelving_offloading. [ 883.755267] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Received event network-changed-62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.755557] env[69982]: DEBUG nova.compute.manager [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Refreshing instance network info cache due to event network-changed-62ecce80-4354-4f66-a470-0b4ef8d663a8. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.755748] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Acquiring lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.756037] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Acquired lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.756310] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Refreshing network info cache for port 62ecce80-4354-4f66-a470-0b4ef8d663a8 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 883.789037] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864746, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743875} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.789322] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] b3aeaa98-724b-4563-aeaf-a089906eb0eb/b3aeaa98-724b-4563-aeaf-a089906eb0eb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.789657] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.790066] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30080621-812a-438e-bbd5-286c95151baf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.801318] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 883.801318] env[69982]: value = "task-3864748" [ 883.801318] env[69982]: _type = "Task" [ 883.801318] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.809809] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.810036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.810211] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 883.818481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.818481] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864748, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.021587] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.021805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.035755] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 884.110661] env[69982]: DEBUG oslo_concurrency.lockutils [None req-20cc4fd4-ce05-4eb6-8e81-047fd97ec61f tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.499s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.137736] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Successfully created port: cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 884.230128] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864747, 'name': PowerOffVM_Task, 'duration_secs': 0.287623} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.233409] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.233804] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 884.234445] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6afc4f2c-c2ea-4388-b4cb-4851e399f0ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.310829] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.311418] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.311776] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore1] 3f896859-5a4a-4a59-bee8-b116e291fbe7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.317136] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21a11bbb-77b4-4386-a5f6-981dfd81cdad {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.337190] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864748, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094398} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.337190] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 884.337190] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 884.337190] env[69982]: value = "task-3864750" [ 884.337190] env[69982]: _type = "Task" [ 884.337190] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.340117] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0864e7-fbf6-4f11-9a76-a4bd31bed44e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.366767] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.377877] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] b3aeaa98-724b-4563-aeaf-a089906eb0eb/b3aeaa98-724b-4563-aeaf-a089906eb0eb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 884.379050] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 884.384270] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c23d3b2-615c-465b-9054-bc9b263f8f29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.408835] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 884.408835] env[69982]: value = "task-3864751" [ 884.408835] env[69982]: _type = "Task" [ 884.408835] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.429835] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.527531] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 884.670806] env[69982]: DEBUG nova.compute.manager [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Received event network-vif-plugged-f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.672344] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.672344] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.672344] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.672344] env[69982]: DEBUG nova.compute.manager [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] No waiting events found dispatching network-vif-plugged-f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 884.672344] env[69982]: WARNING nova.compute.manager [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Received unexpected event network-vif-plugged-f9356a59-fa6f-4664-b5ff-4a2609f506c3 for instance with vm_state building and task_state spawning. 
[ 884.672539] env[69982]: DEBUG nova.compute.manager [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Received event network-changed-f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.672539] env[69982]: DEBUG nova.compute.manager [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Refreshing instance network info cache due to event network-changed-f9356a59-fa6f-4664-b5ff-4a2609f506c3. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 884.672539] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Acquiring lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.684920] env[69982]: DEBUG nova.network.neutron [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.732984] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f05f92b-78d4-4dc5-92b7-7c5358f55dc4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.743213] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea444c3-48af-4bec-b6a7-38dd277175fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.748846] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updated VIF entry in instance network info cache for port 62ecce80-4354-4f66-a470-0b4ef8d663a8. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 884.749055] env[69982]: DEBUG nova.network.neutron [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updating instance_info_cache with network_info: [{"id": "62ecce80-4354-4f66-a470-0b4ef8d663a8", "address": "fa:16:3e:74:2d:e4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": null, "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap62ecce80-43", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.790807] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a472e7-cc6e-4010-bdec-bb450e908d78 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.801636] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224672a7-bba3-4e05-8d87-cc2782560a0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.819661] env[69982]: DEBUG nova.compute.provider_tree [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.856322] env[69982]: DEBUG oslo_vmware.api [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206341} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.856671] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.856762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.857437] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.857437] env[69982]: INFO nova.compute.manager [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 884.857437] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.857692] env[69982]: DEBUG nova.compute.manager [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.858691] env[69982]: DEBUG nova.network.neutron [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 884.920614] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864751, 'name': ReconfigVM_Task, 'duration_secs': 0.435667} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.920998] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Reconfigured VM instance instance-0000003b to attach disk [datastore2] b3aeaa98-724b-4563-aeaf-a089906eb0eb/b3aeaa98-724b-4563-aeaf-a089906eb0eb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.921690] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcca6899-a7ae-4b19-83a7-d72f2c86af9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.931235] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 884.931235] env[69982]: value = "task-3864752" [ 884.931235] env[69982]: _type = "Task" [ 884.931235] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.940620] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864752, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.008816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "10a4294b-39ce-4643-98b5-71ac283f05f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.009123] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.045025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "8358b105-7276-4292-804d-534f9fb1535e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.045496] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.056449] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 885.061635] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.076592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 885.076828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.096776] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 885.097115] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 885.097312] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 885.097500] 
env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 885.097641] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 885.098388] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 885.098388] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 885.098388] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 885.098388] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 885.098629] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 885.098629] env[69982]: DEBUG nova.virt.hardware [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 885.099933] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2745ff9-7dc3-4cd2-9f05-8193c0b0fc90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.109872] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520e16d1-ebec-4cd1-8a4e-c3ffabaa3196 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.188417] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" 
{{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.188873] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Instance network_info: |[{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 885.189231] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Acquired lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.189418] env[69982]: DEBUG nova.network.neutron [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Refreshing network info cache for port f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 885.190779] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:e7:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9356a59-fa6f-4664-b5ff-4a2609f506c3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 885.201622] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.202313] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 885.202568] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57023299-5e12-44aa-9f08-96e691665b5e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.229168] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.229168] env[69982]: value = "task-3864753" [ 885.229168] env[69982]: _type = "Task" [ 885.229168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.242554] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864753, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.252512] env[69982]: DEBUG oslo_concurrency.lockutils [req-079c3b7d-36b4-48f7-8786-eac180a01a3a req-4bad60f8-bb95-4b1c-9928-4ff179d2975b service nova] Releasing lock "refresh_cache-881cbfae-7630-45e0-a8ad-b2cd283689ea" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.323039] env[69982]: DEBUG nova.scheduler.client.report [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.442964] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864752, 'name': Rename_Task, 'duration_secs': 0.366924} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.443604] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 885.443912] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7423bc88-4cb1-4057-ad6a-e74aafc048a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.454114] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 885.454114] env[69982]: value = "task-3864754" [ 885.454114] env[69982]: _type = "Task" [ 885.454114] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.462660] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.723653] env[69982]: DEBUG nova.network.neutron [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.741219] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864753, 'name': CreateVM_Task, 'duration_secs': 0.46524} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.742213] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.742954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.743176] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.743551] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 885.744123] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02ec4211-77bf-4f1d-b871-e22adc9a1517 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.750162] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 885.750162] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c6045-a777-5e6c-7ea4-ec8b09e29f2d" [ 885.750162] env[69982]: _type = "Task" [ 885.750162] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.760616] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c6045-a777-5e6c-7ea4-ec8b09e29f2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.969414] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864754, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.226492] env[69982]: INFO nova.compute.manager [-] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Took 1.37 seconds to deallocate network for instance. 
[ 886.244715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.244966] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.263546] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c6045-a777-5e6c-7ea4-ec8b09e29f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.026111} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.263868] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.265083] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.265083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.265083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.265083] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.265083] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-34b0d3f2-1911-48d1-a73d-ce01f1d35cb1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.276823] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.277089] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.278318] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f50950-9436-4472-8ae8-7da9a4ea2289 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.287804] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 886.287804] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528e8f96-ae80-d7ea-de62-96f1f296266c" [ 886.287804] env[69982]: _type = "Task" [ 886.287804] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.297181] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528e8f96-ae80-d7ea-de62-96f1f296266c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.335791] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.311s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.348548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.563s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.348548] env[69982]: DEBUG nova.objects.instance [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'resources' on Instance uuid a70fa652-4726-4bc2-966f-530aaa79ba86 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.370929] env[69982]: DEBUG nova.network.neutron [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updated VIF entry in instance network info cache for port f9356a59-fa6f-4664-b5ff-4a2609f506c3. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 886.371326] env[69982]: DEBUG nova.network.neutron [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.463735] env[69982]: DEBUG oslo_vmware.api [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864754, 'name': PowerOnVM_Task, 'duration_secs': 0.691536} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.464099] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.464310] env[69982]: INFO nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Took 8.25 seconds to spawn the instance on the hypervisor. [ 886.464490] env[69982]: DEBUG nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.465424] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-417e4510-82ef-4391-9c4a-aa301dc79aff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.506658] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Successfully updated port: cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 886.736812] env[69982]: DEBUG nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Received event network-vif-deleted-3719920b-0825-4574-9add-6fc870171069 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.737085] env[69982]: DEBUG nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Received event network-vif-plugged-cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.737209] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.737413] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.737572] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.737742] env[69982]: DEBUG nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] No waiting events found dispatching network-vif-plugged-cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.737907] env[69982]: WARNING nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Received unexpected event network-vif-plugged-cc682899-b104-4e53-b80d-49a30d6e0316 for instance with vm_state building and task_state spawning. [ 886.738315] env[69982]: DEBUG nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Received event network-changed-cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 886.738533] env[69982]: DEBUG nova.compute.manager [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Refreshing instance network info cache due to event network-changed-cc682899-b104-4e53-b80d-49a30d6e0316. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 886.738813] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.740107] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.740107] env[69982]: DEBUG nova.network.neutron [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Refreshing network info cache for port cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 886.743335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.799410] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528e8f96-ae80-d7ea-de62-96f1f296266c, 'name': SearchDatastore_Task, 'duration_secs': 0.043161} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.800308] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe6af97b-69a7-4c90-b614-26d677789710 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.807702] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 886.807702] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d3f0bd-186d-b6da-53f1-bb05481a59e5" [ 886.807702] env[69982]: _type = "Task" [ 886.807702] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.817447] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d3f0bd-186d-b6da-53f1-bb05481a59e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.873721] env[69982]: DEBUG oslo_concurrency.lockutils [req-0c068927-2e33-46ab-8379-81026326b1b6 req-0338876e-7f83-4c8d-96db-f5d53317d0ca service nova] Releasing lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.987086] env[69982]: INFO nova.compute.manager [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Took 50.29 seconds to build instance. [ 887.009377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 887.287424] env[69982]: DEBUG nova.network.neutron [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 887.321767] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d3f0bd-186d-b6da-53f1-bb05481a59e5, 'name': SearchDatastore_Task, 'duration_secs': 0.02805} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.322114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.322390] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 887.322723] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3aed5fa3-9be7-4a72-aaef-a63e3a918166 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.335192] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 887.335192] env[69982]: value = "task-3864755" [ 887.335192] env[69982]: _type = "Task" [ 887.335192] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.344556] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864755, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.384215] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efd0563-ef84-4bb3-b0ac-05be3228ea73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.393228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39ee1da-7715-46b9-bc4f-853bb5151f3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.445503] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eceeca-5f3c-431b-809a-8d59085b40ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.450620] env[69982]: DEBUG nova.network.neutron [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.459328] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b5ac6f-4144-4eff-a0a0-a80b63d89628 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.475078] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.475606] env[69982]: DEBUG nova.compute.provider_tree [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.489419] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8896eb52-3713-4037-92ee-db5f12028bf4 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.179s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.490639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.016s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.492469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock 
"b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.492469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.492469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.493865] env[69982]: INFO nova.compute.manager [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Terminating instance [ 887.850577] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864755, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.888824] env[69982]: INFO nova.compute.manager [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Swapping old allocation on dict_keys(['206a5498-2e79-46c1-a636-9488a05fb67d']) held by migration 535190ea-b5b3-4b17-a596-f0eafa3d66dd for instance [ 887.922044] env[69982]: DEBUG nova.scheduler.client.report [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Overwriting current allocation {'allocations': {'206a5498-2e79-46c1-a636-9488a05fb67d': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 86}}, 'project_id': '60aa47f826ce4ba7b14d6937eef58338', 'user_id': 'a4ecf3bf94764bbea25d59a4fea2ebda', 'consumer_generation': 1} on consumer 8b812422-4ca6-4d2b-b6af-873fdb21fab6 {{(pid=69982) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 887.953778] env[69982]: DEBUG oslo_concurrency.lockutils [req-81804499-4b2c-4757-b5b5-e3cea63d6f43 req-b4b5e7da-6e8a-4396-9dcf-c64bf3f7c204 service nova] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.954610] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
887.954806] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 887.979363] env[69982]: DEBUG nova.scheduler.client.report [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.995487] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 888.000321] env[69982]: DEBUG nova.compute.manager [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 888.000549] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.003148] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954e28f1-ff08-45da-9ffe-e5304c032db4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.016307] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.016639] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b733e23-445d-4817-ac61-730ee345e55d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.027055] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 888.027055] env[69982]: value = "task-3864756" [ 888.027055] env[69982]: _type = "Task" [ 888.027055] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.034398] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.034796] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquired lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.034860] env[69982]: DEBUG nova.network.neutron [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 888.040919] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864756, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.349930] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.726309} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.350383] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.350812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.351335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-125e9fca-fd31-46d6-901e-df79fb018dd7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.359323] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 888.359323] env[69982]: value = "task-3864757" [ 888.359323] env[69982]: _type = "Task" [ 888.359323] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.370177] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864757, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.487982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.141s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.492057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.363s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.493665] env[69982]: INFO nova.compute.claims [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.509579] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 888.540614] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864756, 'name': PowerOffVM_Task, 'duration_secs': 0.24558} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.540919] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.541102] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.541425] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b2abefd-183e-424c-bcd8-5c50a85dc5a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.613895] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.616134] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.616457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.616670] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Deleting the datastore file [datastore2] b3aeaa98-724b-4563-aeaf-a089906eb0eb {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.616950] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52b776b6-1b5c-45f1-a8df-acd9bdbe007c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.625076] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for the task: (returnval){ [ 888.625076] env[69982]: value = "task-3864759" [ 888.625076] env[69982]: _type = "Task" [ 888.625076] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.629858] env[69982]: INFO nova.scheduler.client.report [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocations for instance a70fa652-4726-4bc2-966f-530aaa79ba86 [ 888.634485] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864759, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.851257] env[69982]: DEBUG nova.network.neutron [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.873186] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071388} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.873235] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.874096] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbeab981-25e0-4ad3-b695-e490de123374 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.898891] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.899286] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d303fb5-1bb1-41b9-be7f-6edc1bfa0175 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.923204] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 888.923204] env[69982]: value = "task-3864760" [ 888.923204] env[69982]: _type = "Task" [ 888.923204] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.932417] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864760, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.978516] env[69982]: DEBUG nova.network.neutron [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [{"id": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "address": "fa:16:3e:e3:f8:55", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape0bdee0a-ac", "ovs_interfaceid": "e0bdee0a-ac19-47d1-9d6b-baffaa7a181a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.140913] env[69982]: DEBUG oslo_vmware.api [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Task: {'id': task-3864759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215524} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.141472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f1c87d25-3c2a-4489-93f7-58fe2f3ddcb2 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "a70fa652-4726-4bc2-966f-530aaa79ba86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.508s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.142756] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.142954] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.143169] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.143347] env[69982]: INFO nova.compute.manager [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 889.143663] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 889.143882] env[69982]: DEBUG nova.compute.manager [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 889.143988] env[69982]: DEBUG nova.network.neutron [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 889.354949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.355436] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance network_info: |[{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 889.355932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:cf:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc682899-b104-4e53-b80d-49a30d6e0316', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 889.370503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating folder: Project (820dcd8333cb4a678ef562e4150518d2). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 889.371112] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12e3dd62-462a-496c-84de-4a5262c87ec0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.387114] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created folder: Project (820dcd8333cb4a678ef562e4150518d2) in parent group-v767796. [ 889.387390] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating folder: Instances. Parent ref: group-v767975. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 889.387724] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2535c112-c441-4328-9237-ce7a12eacf53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.403748] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created folder: Instances in parent group-v767975. [ 889.404029] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 889.404205] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 889.406607] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fce27ca9-1984-4841-a9eb-e79a16be5ee2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.438845] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 889.438845] env[69982]: value = "task-3864763" [ 889.438845] env[69982]: _type = "Task" [ 889.438845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.447736] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864760, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.466206] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864763, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.481835] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Releasing lock "refresh_cache-8b812422-4ca6-4d2b-b6af-873fdb21fab6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.482429] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 889.483730] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bed9a3ac-7a4a-4444-af8c-49fca04f7420 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.495179] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 889.495179] env[69982]: value = "task-3864764" [ 889.495179] env[69982]: _type = "Task" [ 889.495179] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.513299] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864764, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.571818] env[69982]: DEBUG nova.compute.manager [req-5041001d-32c7-4e8e-ba83-76bc452910b0 req-27f36da2-29ce-4c18-b39a-7c8a98a8f786 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Received event network-vif-deleted-45ff6687-e818-4364-9511-d15b0c637c11 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.572093] env[69982]: INFO nova.compute.manager [req-5041001d-32c7-4e8e-ba83-76bc452910b0 req-27f36da2-29ce-4c18-b39a-7c8a98a8f786 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Neutron deleted interface 45ff6687-e818-4364-9511-d15b0c637c11; detaching it from the instance and deleting it from the info cache [ 889.572298] env[69982]: DEBUG nova.network.neutron [req-5041001d-32c7-4e8e-ba83-76bc452910b0 req-27f36da2-29ce-4c18-b39a-7c8a98a8f786 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.945020] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864760, 'name': ReconfigVM_Task, 'duration_secs': 0.611255} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.946341] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.949790] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4034309-2eec-4ee1-82ee-da27ab5507f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.963596] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864763, 'name': CreateVM_Task, 'duration_secs': 0.4111} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.965017] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 889.965395] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 889.965395] env[69982]: value = "task-3864765" [ 889.965395] env[69982]: _type = "Task" [ 889.965395] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.966104] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.966303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.966982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 889.970045] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d1de8b6-82e3-44e3-96e0-0a4d941b9220 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.982325] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: 
(returnval){ [ 889.982325] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b1ff87-662e-3caf-7e6f-073af896004b" [ 889.982325] env[69982]: _type = "Task" [ 889.982325] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.985619] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864765, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.999222] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b1ff87-662e-3caf-7e6f-073af896004b, 'name': SearchDatastore_Task, 'duration_secs': 0.01557} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.003186] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.005765] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 890.005765] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.005765] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.005765] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 890.005765] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da00faeb-9e75-43fe-a46c-1eeff56b7d4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.013474] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 
tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864764, 'name': PowerOffVM_Task, 'duration_secs': 0.49274} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.014901] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 890.015714] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:06:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d19589d5-9552-4797-87a2-fa71245a23ed',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-913055492',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.015947] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.016120] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.016306] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.016450] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.016600] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.016809] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 890.016971] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.017155] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.017322] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.017611] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.022862] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 890.023060] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 890.027268] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6ac56a0-9ad2-44b0-9ac9-11fb0c41d577 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.037726] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e66f30da-a5a5-4304-bb84-7dae248c26b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.046682] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 890.046682] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e56619-c430-678b-d7df-2e4f37b9e5bb" [ 890.046682] env[69982]: _type = "Task" [ 890.046682] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.047203] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 890.047203] env[69982]: value = "task-3864766" [ 890.047203] env[69982]: _type = "Task" [ 890.047203] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.052697] env[69982]: DEBUG nova.network.neutron [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.068104] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864766, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.068510] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e56619-c430-678b-d7df-2e4f37b9e5bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.075234] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ef3f158-dff4-4c5b-9e1d-e841271137eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.090425] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebb2cd4-b3b7-4339-a630-f3593187ac38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.133478] env[69982]: DEBUG nova.compute.manager [req-5041001d-32c7-4e8e-ba83-76bc452910b0 req-27f36da2-29ce-4c18-b39a-7c8a98a8f786 service nova] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Detach interface failed, port_id=45ff6687-e818-4364-9511-d15b0c637c11, reason: Instance b3aeaa98-724b-4563-aeaf-a089906eb0eb could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 890.340764] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e95fdf-b8ab-485e-8dce-001f631cd504 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.352333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eb6139-9339-40fb-b3f9-ea0f00d44c15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.391142] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23ff896-bb48-4319-a261-e51a90b95599 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.401718] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d462599-a793-4f13-905c-18b2ad19d039 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.421201] env[69982]: DEBUG nova.compute.provider_tree [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.478240] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864765, 'name': Rename_Task, 'duration_secs': 0.210062} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.478484] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.478783] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ffe0ea6-3471-4865-b19a-d8640603200b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.486394] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 890.486394] env[69982]: value = "task-3864767" [ 890.486394] env[69982]: _type = "Task" [ 890.486394] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.497771] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864767, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.560493] env[69982]: INFO nova.compute.manager [-] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Took 1.42 seconds to deallocate network for instance. [ 890.571429] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e56619-c430-678b-d7df-2e4f37b9e5bb, 'name': SearchDatastore_Task, 'duration_secs': 0.018415} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.571782] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864766, 'name': ReconfigVM_Task, 'duration_secs': 0.242987} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.575805] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00299cfe-ddad-4b3a-99a4-fd79541470e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.583066] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f55056-2cf2-43aa-8c84-c12de713c2e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.591714] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 890.591714] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529a2561-61d6-f238-adb0-75164ea25989" [ 890.591714] env[69982]: _type = "Task" [ 890.591714] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.607257] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:06:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='d19589d5-9552-4797-87a2-fa71245a23ed',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-913055492',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 890.607902] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.607902] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 890.607902] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.608099] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 890.609386] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 890.609386] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 890.609386] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 890.609386] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 
tempest-MigrationsAdminTest-1206652408-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 890.609649] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 890.609871] env[69982]: DEBUG nova.virt.hardware [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 890.614612] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ed35f6-7fd1-455d-ba6f-27198572489d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.625590] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 890.625590] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52719d60-f5d0-3041-ec37-286ee5312105" [ 890.625590] env[69982]: _type = "Task" [ 890.625590] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.625878] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529a2561-61d6-f238-adb0-75164ea25989, 'name': SearchDatastore_Task, 'duration_secs': 0.019451} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.626199] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.626461] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 890.629622] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-184579a2-f287-4e3f-9e99-aadc1de8ebb6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.638648] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52719d60-f5d0-3041-ec37-286ee5312105, 'name': SearchDatastore_Task, 'duration_secs': 0.008843} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.644921] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 890.646187] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 890.646187] env[69982]: value = "task-3864768" [ 890.646187] env[69982]: _type = "Task" [ 890.646187] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.646187] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29f926a0-702b-4f08-bf61-23f6bc6c1e6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.670805] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864768, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.672443] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 890.672443] env[69982]: value = "task-3864769" [ 890.672443] env[69982]: _type = "Task" [ 890.672443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.681748] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.928839] env[69982]: DEBUG nova.scheduler.client.report [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 891.001510] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864767, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.088228] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.172293] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864768, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.183455] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864769, 'name': ReconfigVM_Task, 'duration_secs': 0.266403} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.183762] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.184658] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944d5672-4ad3-425a-8a91-a6f99c371efc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.209740] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.210135] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e4e0b44-d628-4e18-9fbb-fe832c2dd6ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.231088] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 891.231088] env[69982]: value = "task-3864770" [ 891.231088] env[69982]: _type = "Task" [ 891.231088] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.241708] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864770, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.373730] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 891.374768] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58e5c1b-9e2a-4828-bf1f-3a1da9bfd8cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.381966] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk is in state: ready. 
{{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 891.382191] env[69982]: ERROR oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk due to incomplete transfer. [ 891.382467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-676a272e-79ee-4ba3-9213-11742d4e2976 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.391049] env[69982]: DEBUG oslo_vmware.rw_handles [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5274234c-f1a0-dc5f-a1e7-6c0fe52eac87/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 891.391228] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Uploaded image d4c2f593-e18f-4b50-b392-250a022eb1f3 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 891.393765] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 891.394102] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8f01d410-329a-4a95-93d7-16fef7f2fbb6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.402241] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 891.402241] env[69982]: value = "task-3864771" [ 891.402241] env[69982]: _type = "Task" [ 891.402241] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.413010] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864771, 'name': Destroy_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.436215] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.944s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.436774] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 891.439645] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.941s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.439924] env[69982]: DEBUG nova.objects.instance [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lazy-loading 'resources' on Instance uuid a427dc30-7e0f-4313-a8e9-f76451e4a112 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 891.501667] env[69982]: DEBUG oslo_vmware.api [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864767, 'name': PowerOnVM_Task, 'duration_secs': 0.696022} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.502126] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.502437] env[69982]: INFO nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Took 10.47 seconds to spawn the instance on the hypervisor. 
[ 891.502709] env[69982]: DEBUG nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 891.503890] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b055c54c-4a9a-4b99-af07-171c32fe3a0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.598762] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 891.599135] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 891.674544] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864768, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602638} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.674924] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 891.675278] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 891.675626] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a911385f-9aca-49b7-987a-85e43e797672 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.684367] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 891.684367] env[69982]: value = "task-3864772" [ 891.684367] env[69982]: _type = "Task" [ 891.684367] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.697855] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864772, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.742138] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.915102] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864771, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.946426] env[69982]: DEBUG nova.compute.utils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 891.950161] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 891.950161] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 891.996534] env[69982]: DEBUG nova.policy [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1e8a458282a94c7493628f2307755af3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6642ce24dd0040fea9a3f89b22343330', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 892.026980] env[69982]: INFO nova.compute.manager [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Took 50.38 seconds to build instance. [ 892.203045] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071472} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.203385] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 892.204315] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1212f6d7-ef77-4532-a8d1-ecce3fa2ade3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.236671] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 892.240909] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd4905a9-bb7e-4b25-88c4-e1459ee5d7cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.268300] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864770, 'name': ReconfigVM_Task, 'duration_secs': 0.596962} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.270609] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6/8b812422-4ca6-4d2b-b6af-873fdb21fab6.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.271377] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 892.271377] env[69982]: value = "task-3864773" [ 892.271377] env[69982]: _type = "Task" [ 892.271377] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.275032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd77a6c-28ed-4591-9ca2-b2f3963309f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.307434] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c96be85-8cb5-44d9-b75b-7f4ab9423471 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.311340] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864773, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.334297] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d46936-bc57-4e20-86db-0f57bffe52b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.357915] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee94aae-25d8-4454-babc-f53b3d804026 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.366677] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.367260] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60ff9361-623d-4298-bda1-7779d60a7060 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.371727] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Successfully created port: 2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 892.377589] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 892.377589] env[69982]: value = "task-3864774" [ 892.377589] env[69982]: _type = "Task" [ 892.377589] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.386680] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864774, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.415572] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864771, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.451690] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 892.529198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-43c465ca-9ad5-43d7-ac1a-4cc846e5b696 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.821s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.578197] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8436e455-be46-4996-9a60-9c99b8690a96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.587468] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bbfcd7-3fc3-4efd-b4cf-5d8b2695e28e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.621664] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80915d97-1017-4605-b998-bcead953b286 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.630857] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e16e8bc-b98d-4581-8607-834551cd76f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.647226] env[69982]: DEBUG nova.compute.provider_tree [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.791239] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864773, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.888266] env[69982]: DEBUG oslo_vmware.api [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864774, 'name': PowerOnVM_Task, 'duration_secs': 0.423453} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.888537] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 892.918230] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864771, 'name': Destroy_Task, 'duration_secs': 1.257475} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.918571] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Destroyed the VM [ 892.918947] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 892.919368] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a3663802-f8b7-4f6f-9d75-bb548ed9e650 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.927465] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 892.927465] env[69982]: value = "task-3864775" [ 892.927465] env[69982]: _type = "Task" [ 892.927465] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.936948] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864775, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.035178] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 893.150470] env[69982]: DEBUG nova.scheduler.client.report [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 893.288147] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864773, 'name': ReconfigVM_Task, 'duration_secs': 0.704133} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.288443] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfigured VM instance instance-0000003d to attach disk [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 893.289314] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6397b188-c612-4ee5-9a83-50ec1c168364 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.296769] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 893.296769] env[69982]: value = "task-3864776" [ 893.296769] env[69982]: _type = "Task" [ 893.296769] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.305601] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864776, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.439328] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864775, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.464893] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 893.490490] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 893.490837] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 893.490980] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 893.491158] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 893.491309] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 893.491457] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 893.491671] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 893.491840] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 893.492065] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 
tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 893.492252] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 893.492432] env[69982]: DEBUG nova.virt.hardware [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 893.493375] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235adde1-21bb-4b77-b33f-fdd815758007 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.502901] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72fbfd8-109b-46d0-8c31-9b245b44f57b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.553610] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 893.655816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.216s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.658322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 42.775s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.658529] env[69982]: DEBUG nova.objects.instance [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 893.680359] env[69982]: INFO nova.scheduler.client.report [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted allocations for instance a427dc30-7e0f-4313-a8e9-f76451e4a112 [ 893.811917] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c 
tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864776, 'name': Rename_Task, 'duration_secs': 0.175792} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.812324] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 893.812658] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0d2cb8a-5d4f-4b7d-ab9d-3919891940c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.824222] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 893.824222] env[69982]: value = "task-3864777" [ 893.824222] env[69982]: _type = "Task" [ 893.824222] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.834583] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.900742] env[69982]: INFO nova.compute.manager [None req-1f875848-e6dd-4699-9591-3be7e9959728 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance to original state: 'active' [ 893.941316] env[69982]: DEBUG oslo_vmware.api [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864775, 'name': RemoveSnapshot_Task, 'duration_secs': 0.957305} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.941556] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 893.941850] env[69982]: INFO nova.compute.manager [None req-3c1e6770-3706-4f8a-9210-552f56587343 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 17.49 seconds to snapshot the instance on the hypervisor. 
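The Destroy_Task, RemoveSnapshot_Task and Rename_Task entries above all follow the same shape: submit a vCenter task, then poll it on a fixed interval, logging "progress is N%" until the poll reports that it completed successfully along with its duration. The sketch below is a simplified, self-contained illustration of that polling pattern only; it is not oslo.vmware's wait_for_task implementation, and poll_fn, TaskTimeout and the state names are assumptions made for the example.

import time

class TaskTimeout(Exception):
    """Raised when a task never reaches a terminal state."""

def wait_for_task(poll_fn, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds or fails (illustrative sketch).

    poll_fn() is assumed to return an object with .state in
    {'running', 'success', 'error'}, plus .progress and .error; this
    mirrors the "progress is N%" -> "completed successfully" sequence
    in the log above, not the real oslo.vmware TaskInfo type.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_fn()
        if info.state == 'success':
            return info                      # task finished; caller can report duration
        if info.state == 'error':
            raise RuntimeError(info.error)   # surface the fault to the caller
        print('progress is %s%%' % info.progress)
        time.sleep(poll_interval)            # still running: wait and poll again
    raise TaskTimeout('task did not complete within %.0fs' % timeout)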
[ 894.028267] env[69982]: DEBUG nova.compute.manager [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Received event network-vif-plugged-2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 894.028526] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] Acquiring lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.028951] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.029019] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.029179] env[69982]: DEBUG nova.compute.manager [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] No waiting events found dispatching network-vif-plugged-2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 894.029345] env[69982]: WARNING nova.compute.manager [req-ae29a414-9c73-4b46-acac-898f33f2be5f req-133b3afd-a26d-4e35-a7f9-12ba7e3de493 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Received unexpected event network-vif-plugged-2b1bc572-6c6d-4ce7-8f25-cffe67034c33 for instance with vm_state building and task_state spawning. [ 894.176183] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Successfully updated port: 2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 894.191627] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8b206fa4-e471-4890-9bcd-602600636cee tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "a427dc30-7e0f-4313-a8e9-f76451e4a112" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.809s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.338345] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864777, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.386124] env[69982]: DEBUG nova.compute.manager [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 894.679600] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da398029-059a-4454-bfc0-bc3812066452 tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.681316] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.951s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 894.681563] env[69982]: DEBUG nova.objects.instance [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lazy-loading 'resources' on Instance uuid c563267f-7699-4bd1-83cf-59ecef500ac3 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 894.683972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.684153] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.684423] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 894.840266] env[69982]: DEBUG oslo_vmware.api [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864777, 'name': PowerOnVM_Task, 'duration_secs': 0.94528} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.841628] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 894.841902] env[69982]: INFO nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 9.78 seconds to spawn the instance on the hypervisor. [ 894.841902] env[69982]: DEBUG nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.843040] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4786f141-457b-4cf4-8693-0c67105c1ff8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.915053] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.243632] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 895.366037] env[69982]: INFO nova.compute.manager [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 50.95 seconds to build instance. 
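The "compute_resources" entries above are emitted by oslo.concurrency's lock wrapper, which records how long each caller waited to acquire a named lock and how long it held it (the lockutils.py:405/410/424 call sites in the log). Below is a minimal sketch of guarding a critical section the same way, assuming only the documented lockutils.synchronized decorator; the toy resource-tracker class and its fields are invented for illustration and are not Nova's ResourceTracker.

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

class ToyResourceTracker:
    """Illustration only; not Nova's resource tracker."""

    def __init__(self):
        self._claims = {}

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Concurrent callers serialize on the named lock; the decorator's
        # wrapper logs the DEBUG "acquired ... waited Ns" / "released ...
        # held Ns" lines that appear throughout this log.
        self._claims[instance_uuid] = {'vcpus': vcpus, 'memory_mb': memory_mb}
        return self._claims[instance_uuid]

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_usage(self, instance_uuid):
        # Same lock name, so claims and usage updates never interleave.
        return self._claims.get(instance_uuid)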
[ 895.503384] env[69982]: DEBUG nova.network.neutron [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [{"id": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "address": "fa:16:3e:1d:77:ed", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1bc572-6c", "ovs_interfaceid": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.787897] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58008324-5ef2-4029-9ba1-4652008ccba8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.797206] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f963a987-eafb-4b62-ae03-d04d97aedc6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.830930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce121492-ffeb-468b-a364-2895549d8965 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.840323] env[69982]: DEBUG nova.compute.manager [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.841994] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7579f602-edc0-4649-939a-6d1fa7ca099d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.847136] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c019352-a957-4b07-a466-02a06a951883 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.866228] env[69982]: DEBUG nova.compute.provider_tree [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 895.871970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-516f35ad-3ad7-40c0-95eb-a3bb38efd88c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.416s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.006466] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.006994] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Instance network_info: |[{"id": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "address": "fa:16:3e:1d:77:ed", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1bc572-6c", "ovs_interfaceid": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 896.008027] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:77:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dba18786-598d-4e06-96db-b3dc1717530f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b1bc572-6c6d-4ce7-8f25-cffe67034c33', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 896.015769] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 896.016017] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 896.016252] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8764cfc8-3222-42b2-998a-621860326388 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.039162] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 896.039162] env[69982]: value = "task-3864778" [ 896.039162] env[69982]: _type = "Task" [ 896.039162] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.047644] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864778, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.072634] env[69982]: DEBUG nova.compute.manager [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Received event network-changed-2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 896.072937] env[69982]: DEBUG nova.compute.manager [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Refreshing instance network info cache due to event network-changed-2b1bc572-6c6d-4ce7-8f25-cffe67034c33. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 896.073304] env[69982]: DEBUG oslo_concurrency.lockutils [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] Acquiring lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.073536] env[69982]: DEBUG oslo_concurrency.lockutils [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] Acquired lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.073796] env[69982]: DEBUG nova.network.neutron [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Refreshing network info cache for port 2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 896.089917] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.090324] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.090554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.090752] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.091010] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.093417] env[69982]: INFO nova.compute.manager [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 
tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Terminating instance [ 896.330418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.330960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.369287] env[69982]: INFO nova.compute.manager [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] instance snapshotting [ 896.371755] env[69982]: DEBUG nova.scheduler.client.report [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 896.376083] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 896.378970] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b567e8-1843-4870-ae0a-7887b2029461 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.402290] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7dd9f0-8650-4293-9d10-c29242670fc3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.548897] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864778, 'name': CreateVM_Task, 'duration_secs': 0.473501} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.549081] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.549805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.549967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.550334] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 896.550591] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-852679b2-34f4-46a6-83bc-aeb99dafb286 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.555652] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 896.555652] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdee0a-54f9-2f7c-03d1-3f35c6ce119c" [ 896.555652] env[69982]: _type = "Task" [ 896.555652] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.564130] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdee0a-54f9-2f7c-03d1-3f35c6ce119c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.596847] env[69982]: DEBUG nova.compute.manager [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 896.597104] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.597983] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458881ff-5032-4d27-9b85-7e5b7e545e8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.606786] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 896.609242] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95618645-3b2f-402c-b8ab-f24a3b97d738 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.617062] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 896.617062] env[69982]: value = "task-3864779" [ 896.617062] env[69982]: _type = "Task" [ 896.617062] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.626472] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864779, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.866505] env[69982]: DEBUG nova.network.neutron [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updated VIF entry in instance network info cache for port 2b1bc572-6c6d-4ce7-8f25-cffe67034c33. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 896.866505] env[69982]: DEBUG nova.network.neutron [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [{"id": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "address": "fa:16:3e:1d:77:ed", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1bc572-6c", "ovs_interfaceid": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.886007] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.892285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.130s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.892568] env[69982]: DEBUG nova.objects.instance [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lazy-loading 'resources' on Instance uuid c34355fa-3712-4338-942d-acdb2f8a91ee {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.907057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.915112] env[69982]: INFO nova.scheduler.client.report [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted allocations for instance c563267f-7699-4bd1-83cf-59ecef500ac3 [ 896.919130] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 
tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 896.919755] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dd95db0e-2140-46db-a0bc-da8a9d7eb262 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.933842] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 896.933842] env[69982]: value = "task-3864780" [ 896.933842] env[69982]: _type = "Task" [ 896.933842] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.948838] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864780, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.068551] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdee0a-54f9-2f7c-03d1-3f35c6ce119c, 'name': SearchDatastore_Task, 'duration_secs': 0.02316} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.069070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.069408] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 897.069772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.070067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.070386] env[69982]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.070761] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a31b2391-0280-4d54-a1b3-3c86c993259d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.078357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.078357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.078357] env[69982]: INFO nova.compute.manager [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Rebooting instance [ 897.080702] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.081062] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 897.081952] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e6c7b0-d33c-4319-8d45-7cd07a121f32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.090034] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 897.090034] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52970e82-446b-18e4-d553-62f1c75ec0ec" [ 897.090034] env[69982]: _type = "Task" [ 897.090034] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.104126] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52970e82-446b-18e4-d553-62f1c75ec0ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.127814] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864779, 'name': PowerOffVM_Task, 'duration_secs': 0.213213} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.128146] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.128293] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.128541] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7552cfb8-6679-4eb7-99bc-5f59650fa1f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.132683] env[69982]: DEBUG nova.compute.manager [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Received event network-changed-cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.132881] env[69982]: DEBUG nova.compute.manager [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Refreshing instance network info cache due to event network-changed-cc682899-b104-4e53-b80d-49a30d6e0316. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 897.133124] env[69982]: DEBUG oslo_concurrency.lockutils [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.133282] env[69982]: DEBUG oslo_concurrency.lockutils [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.133496] env[69982]: DEBUG nova.network.neutron [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Refreshing network info cache for port cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 897.194438] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.194438] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.194438] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleting the datastore file [datastore1] 8b812422-4ca6-4d2b-b6af-873fdb21fab6 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.194438] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09a64b3e-c5aa-4c74-a4f3-dc1c0456c885 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.203080] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 897.203080] env[69982]: value = "task-3864782" [ 897.203080] env[69982]: _type = "Task" [ 897.203080] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.211488] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864782, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.369992] env[69982]: DEBUG oslo_concurrency.lockutils [req-42f8b12c-3f12-41bc-9017-bab76fd0c8a9 req-12eed045-07d9-4f75-92d1-609401ad22e4 service nova] Releasing lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.423194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-54e58e89-5cbf-4a23-b6ad-eb9f8588e82e tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "c563267f-7699-4bd1-83cf-59ecef500ac3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 47.724s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.447884] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864780, 'name': CreateSnapshot_Task, 'duration_secs': 0.487279} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.448181] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 897.448960] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd899191-ddc0-4c69-a469-38eaac2f7a5b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.596895] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.597143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.597359] env[69982]: DEBUG nova.network.neutron [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 897.604922] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52970e82-446b-18e4-d553-62f1c75ec0ec, 'name': SearchDatastore_Task, 'duration_secs': 0.011076} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.606209] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f4bad2d-45a3-4de1-b419-6200ca080032 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.612638] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 897.612638] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5221f65f-b539-a4fa-03fd-8259bafe1f17" [ 897.612638] env[69982]: _type = "Task" [ 897.612638] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.625380] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5221f65f-b539-a4fa-03fd-8259bafe1f17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.716423] env[69982]: DEBUG oslo_vmware.api [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192251} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.716725] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.716948] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 897.717583] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 897.718022] env[69982]: INFO nova.compute.manager [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 897.718222] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.718777] env[69982]: DEBUG nova.compute.manager [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 897.718777] env[69982]: DEBUG nova.network.neutron [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 897.923062] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3432d309-082e-45d2-9135-6b50dd874a55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.933109] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a957d93-f1e9-4650-a93c-5513f8fe3939 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.986129] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 897.991901] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a4a5fadc-89e1-48f2-aa7c-e5dd6456f04a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.993187] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1808879-8f4f-4147-8ceb-4a93cf4400da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.003120] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e2018f2-d45e-49ff-9d94-b2b73e6bf809 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.006953] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 898.006953] env[69982]: value = "task-3864783" [ 898.006953] env[69982]: _type = "Task" [ 898.006953] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.019207] env[69982]: DEBUG nova.compute.provider_tree [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.028631] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864783, 'name': CloneVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.126934] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5221f65f-b539-a4fa-03fd-8259bafe1f17, 'name': SearchDatastore_Task, 'duration_secs': 0.012769} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.127274] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.127606] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a61e3d25-9064-4f18-b7f1-0045b705571a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 898.127928] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-919e87c7-c396-4208-81a5-43fdef2901b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.139528] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 898.139528] env[69982]: value = "task-3864784" [ 898.139528] env[69982]: _type = "Task" [ 898.139528] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.148577] env[69982]: DEBUG nova.network.neutron [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updated VIF entry in instance network info cache for port cc682899-b104-4e53-b80d-49a30d6e0316. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 898.149032] env[69982]: DEBUG nova.network.neutron [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.153752] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864784, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.417287] env[69982]: DEBUG nova.compute.manager [req-3c32f3d1-9f5b-49ce-bd68-ba8667e51e0c req-e202e7e8-d495-4961-8305-6023b0582eeb service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Received event network-vif-deleted-e0bdee0a-ac19-47d1-9d6b-baffaa7a181a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 898.417818] env[69982]: INFO nova.compute.manager [req-3c32f3d1-9f5b-49ce-bd68-ba8667e51e0c req-e202e7e8-d495-4961-8305-6023b0582eeb service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Neutron deleted interface e0bdee0a-ac19-47d1-9d6b-baffaa7a181a; detaching it from the instance and deleting it from the info cache [ 898.417818] env[69982]: DEBUG nova.network.neutron [req-3c32f3d1-9f5b-49ce-bd68-ba8667e51e0c req-e202e7e8-d495-4961-8305-6023b0582eeb service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.454356] env[69982]: DEBUG nova.network.neutron [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.518531] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864783, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.524146] env[69982]: DEBUG nova.scheduler.client.report [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.650216] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864784, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.655029] env[69982]: DEBUG oslo_concurrency.lockutils [req-3ec0cbd6-21fb-428a-b172-c71cdc76868e req-1a026e7c-20bb-4bf4-9322-3fd4bf8cb21a service nova] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.759134] env[69982]: DEBUG nova.network.neutron [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.920870] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1b50965-cc75-4553-8b20-d60efbf15501 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.932532] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8def4033-6d98-4ef5-a663-e4d2a3175816 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.970442] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.971764] env[69982]: DEBUG nova.compute.manager [req-3c32f3d1-9f5b-49ce-bd68-ba8667e51e0c req-e202e7e8-d495-4961-8305-6023b0582eeb service nova] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Detach interface failed, port_id=e0bdee0a-ac19-47d1-9d6b-baffaa7a181a, reason: Instance 8b812422-4ca6-4d2b-b6af-873fdb21fab6 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 899.018017] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864783, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.032248] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.140s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.034656] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 43.197s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.057572] env[69982]: INFO nova.scheduler.client.report [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Deleted allocations for instance c34355fa-3712-4338-942d-acdb2f8a91ee [ 899.151015] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638467} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.151266] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a61e3d25-9064-4f18-b7f1-0045b705571a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 899.151509] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 899.151778] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fd115b4-952a-4185-b69f-6b382d0608db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.159341] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 899.159341] env[69982]: value = "task-3864785" [ 899.159341] env[69982]: _type = "Task" [ 899.159341] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.169884] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864785, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.262107] env[69982]: INFO nova.compute.manager [-] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Took 1.54 seconds to deallocate network for instance. [ 899.476667] env[69982]: DEBUG nova.compute.manager [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 899.477402] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e32dc2-0c12-4f31-92b8-d98b2a9bd841 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.528883] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864783, 'name': CloneVM_Task, 'duration_secs': 1.29724} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.528883] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Created linked-clone VM from snapshot [ 899.529627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107aee6e-437e-42b4-a54f-f9e7f5c65a6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.551238] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Uploading image 5b8b1607-0ed9-475e-9c95-23428d81e909 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 899.566024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f7c7343d-0c1d-434e-b634-0ba3f07d6a7f tempest-ServerShowV257Test-1360877640 tempest-ServerShowV257Test-1360877640-project-member] Lock "c34355fa-3712-4338-942d-acdb2f8a91ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 47.749s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.586564] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 899.586564] env[69982]: value = "vm-767980" [ 899.586564] env[69982]: _type = "VirtualMachine" [ 899.586564] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 899.586809] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a11485c9-2375-435b-a0b8-d8ed4944a1c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.596052] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease: (returnval){ [ 899.596052] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ef9386-9777-0aad-1fa0-adba97245f2d" [ 899.596052] env[69982]: _type = "HttpNfcLease" [ 899.596052] env[69982]: } obtained for exporting VM: (result){ [ 899.596052] env[69982]: value = "vm-767980" [ 899.596052] env[69982]: _type = "VirtualMachine" [ 899.596052] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 899.596352] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the lease: (returnval){ [ 899.596352] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ef9386-9777-0aad-1fa0-adba97245f2d" [ 899.596352] env[69982]: _type = "HttpNfcLease" [ 899.596352] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 899.604436] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.604436] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ef9386-9777-0aad-1fa0-adba97245f2d" [ 899.604436] env[69982]: _type = "HttpNfcLease" [ 899.604436] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 899.670340] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075631} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.670628] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 899.671467] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050d0154-212a-4d60-aeee-8cd15d07821f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.697656] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a61e3d25-9064-4f18-b7f1-0045b705571a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 899.697969] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-160b423e-d89b-44af-8dbc-d2e03fa62d46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.719911] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 899.719911] env[69982]: value = "task-3864787" [ 899.719911] env[69982]: _type = "Task" [ 899.719911] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.729760] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864787, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.769355] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.057136] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating resource usage from migration 18bb5912-7e73-47de-bffe-7728a8253c69 [ 900.085142] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a8217447-bc22-4b84-925f-c3c09fb7228c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
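The vCenter operations recorded throughout this section (DeleteDatastoreFile_Task, CloneVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, ResetVM_Task) all follow the same oslo.vmware pattern: the driver submits a task, logs "Waiting for the task ... to complete", and _poll_task then reports "progress is N%" until the task finishes with a duration_secs. What follows is only a minimal, self-contained Python sketch of that poll-until-done loop; fetch_task_state is a hypothetical stand-in for the vCenter query that oslo.vmware actually performs, not a real API.

    import time

    # Hypothetical stand-in for the vCenter task query; it walks a canned
    # sequence of (state, progress) pairs so the sketch runs on its own.
    _FAKE_STATES = iter([("running", 0), ("running", 51), ("success", 100)])

    def fetch_task_state(task_id):
        return next(_FAKE_STATES)

    def wait_for_task(task_id, poll_interval=0.5):
        """Poll a task until it reaches a terminal state, mirroring the
        'progress is N%' / 'completed successfully' records in this log."""
        started = time.time()
        while True:
            state, progress = fetch_task_state(task_id)
            if state == "success":
                print("Task %s completed successfully in %.3fs"
                      % (task_id, time.time() - started))
                return
            if state == "error":
                raise RuntimeError("Task %s failed" % task_id)
            print("Task %s progress is %d%%" % (task_id, progress))
            time.sleep(poll_interval)

    wait_for_task("task-3864784")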
[ 900.085403] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 930c8740-5ad1-4491-8dd6-1a568eaa6f62 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 900.085469] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086200] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086200] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7af5a14d-f586-4746-9831-8be255581637 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 900.086200] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086200] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086428] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9123b08c-d2ec-4c4d-bade-0acdae75640a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086428] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d2684194-a688-4466-9852-1f4ff656f057 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086428] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a4064177-051b-4ec8-a1fc-fa5d299add8b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086428] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9a1de08e-3206-44cc-8d34-a5527faf9684 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086612] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance cd839916-6daf-4b31-941d-6305a585bfaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086612] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d43e5e7a-577d-4fe9-aff7-9012adfbdb9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086702] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9dcaa045-83c6-4e74-881d-a85a1991dbe3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 900.086900] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8b812422-4ca6-4d2b-b6af-873fdb21fab6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.086947] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 43a8e7dd-843b-49f6-9edb-60c2b380e9c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.087033] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 570675a8-3ec0-4fe6-b123-d3901d56b8cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.087604] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 3f896859-5a4a-4a59-bee8-b116e291fbe7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
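The resource tracker records above and in the lines that follow distinguish three cases while reviewing placement allocations: instances actively managed on this host that have allocations, instances no longer managed here that still have allocations referencing this host (logged as WARNING and skipped), and instances scheduled to this host that have yet to start (also skipped). The snippet below is only an illustrative re-creation of that three-way split, not Nova's ResourceTracker code; the instance table and its managed/started flags are assumptions made up for the example (the UUIDs are taken from the records in this log).

    # Illustrative three-way classification of placement allocations against a
    # compute host, mirroring the resource tracker records in this log. The
    # data model here is hypothetical, not Nova's ResourceTracker internals.
    ALLOCATION = {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}

    instances = {
        # uuid: (actively_managed_here, has_started)
        "8b812422-4ca6-4d2b-b6af-873fdb21fab6": (True, True),
        "9dcaa045-83c6-4e74-881d-a85a1991dbe3": (False, True),
        "69103bad-cb3f-4cd1-bfa1-c19b10395674": (True, False),
    }

    def review_allocations(instances):
        for uuid, (managed, started) in instances.items():
            if not managed:
                print("WARNING Instance %s is not being actively managed by "
                      "this compute host but has allocations referencing this "
                      "compute host: %s. Skipping heal of allocation."
                      % (uuid, ALLOCATION))
            elif not started:
                print("DEBUG Instance %s has been scheduled to this compute "
                      "host but has yet to start. Skipping heal of allocation: "
                      "%s." % (uuid, ALLOCATION))
            else:
                print("DEBUG Instance %s actively managed on this compute host "
                      "and has allocations in placement: %s." % (uuid, ALLOCATION))

    review_allocations(instances)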
[ 900.087604] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 48dbc665-8286-4d5d-af4e-1a85d1742952 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.087604] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b3aeaa98-724b-4563-aeaf-a089906eb0eb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 900.087604] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d21659fd-015d-4f5b-b4b5-f38f550e0f00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.087782] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a61e3d25-9064-4f18-b7f1-0045b705571a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 900.109095] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 900.109095] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ef9386-9777-0aad-1fa0-adba97245f2d" [ 900.109095] env[69982]: _type = "HttpNfcLease" [ 900.109095] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 900.109422] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 900.109422] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ef9386-9777-0aad-1fa0-adba97245f2d" [ 900.109422] env[69982]: _type = "HttpNfcLease" [ 900.109422] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 900.110333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817569e5-45e7-45ad-9fe6-1157b0855f37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.120044] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk from lease info. 
{{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 900.120216] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 900.217130] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d2b35939-160f-4a22-8679-446413d31868 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.231117] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864787, 'name': ReconfigVM_Task, 'duration_secs': 0.273182} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.231454] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfigured VM instance instance-0000003e to attach disk [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a61e3d25-9064-4f18-b7f1-0045b705571a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 900.232313] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b1f26e3-a177-49b4-b470-14ab3e7203df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.241467] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 900.241467] env[69982]: value = "task-3864788" [ 900.241467] env[69982]: _type = "Task" [ 900.241467] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.251177] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864788, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.496557] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afa7ecd-0065-445b-bdab-b863ccbb6258 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.505329] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Doing hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 900.505629] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-fb8d538e-7374-4dfb-9341-fa2815fb1abd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.512487] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 900.512487] env[69982]: value = "task-3864789" [ 900.512487] env[69982]: _type = "Task" [ 900.512487] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.523516] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864789, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.591745] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 69103bad-cb3f-4cd1-bfa1-c19b10395674 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 900.759698] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864788, 'name': Rename_Task, 'duration_secs': 0.146903} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.760566] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 900.760566] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af13fbdd-2fa9-49c8-b160-5011b5ac7fc1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.769452] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 900.769452] env[69982]: value = "task-3864790" [ 900.769452] env[69982]: _type = "Task" [ 900.769452] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.780325] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.026367] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864789, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.096348] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a3e3106d-b7df-49c8-9341-a843977aefe4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.294751] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.532331] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864789, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.599104] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 67613f71-a91e-4dae-8a6c-cd74c4821339 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 901.790023] env[69982]: DEBUG oslo_vmware.api [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864790, 'name': PowerOnVM_Task, 'duration_secs': 0.805891} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.790023] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 901.790023] env[69982]: INFO nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Took 8.32 seconds to spawn the instance on the hypervisor. [ 901.790023] env[69982]: DEBUG nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 901.790023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0096d0-f9a1-44a2-903c-4eb8f080a6d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.028146] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864789, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.103916] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ff2c680a-211a-44ad-b00d-1037f1fcb856 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.313843] env[69982]: INFO nova.compute.manager [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Took 53.21 seconds to build instance. [ 902.527533] env[69982]: DEBUG oslo_vmware.api [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864789, 'name': ResetVM_Task, 'duration_secs': 1.863265} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.527824] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Did hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 902.528095] env[69982]: DEBUG nova.compute.manager [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.529133] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78e2002-5197-4ab5-8c0d-cf31ffbe51b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.610144] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 331f218a-ad6b-4417-b56d-83113e0c92cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 902.821733] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31b7eb54-870a-47e7-918d-fd7e15ceb2b0 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 71.818s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.046396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b20d3af-4940-4e55-ae95-b492233a8e25 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" "released" by "nova.compute.manager.ComputeManager.reboot_instance.<locals>.do_reboot_instance" :: held 5.969s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.110633] env[69982]: INFO nova.compute.manager [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Rescuing [ 903.111039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.111257] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.111438] env[69982]: DEBUG nova.network.neutron [None 
req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 903.114256] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.325475] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 903.618810] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 10a4294b-39ce-4643-98b5-71ac283f05f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 903.850714] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.863420] env[69982]: DEBUG nova.network.neutron [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [{"id": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "address": "fa:16:3e:1d:77:ed", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1bc572-6c", "ovs_interfaceid": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.122536] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8358b105-7276-4292-804d-534f9fb1535e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 904.366522] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.627687] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance bba6f430-5af5-4d8a-9cf4-082207c170a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 905.135535] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a9a14fa8-7f58-48f9-994d-b5063833a81b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 905.181291] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.181747] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.637472] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 905.637851] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 18bb5912-7e73-47de-bffe-7728a8253c69 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 905.637851] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9b733e1e-0532-4d91-a460-6b1f1971f388 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 905.902843] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.903249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09a33628-f8e8-47cd-a022-97249ce9c76e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.911814] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 905.911814] env[69982]: value = "task-3864791" [ 905.911814] env[69982]: _type = "Task" [ 905.911814] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.922043] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.141124] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 5b79fc38-ace3-4f94-8d1c-b77912f44a1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 906.141680] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 906.141991] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 906.425725] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864791, 'name': PowerOffVM_Task, 'duration_secs': 0.214911} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.425725] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.425725] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35d466e-abbf-452a-bd81-5dbc405b0e76 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.452078] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e060242-57a6-4b23-b9b8-972113e43625 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.498462] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.498615] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2ca758b-1dd0-46b8-831e-fd10555f9963 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.508169] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 906.508169] env[69982]: value = "task-3864792" [ 906.508169] env[69982]: _type = "Task" [ 906.508169] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.518230] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 906.518514] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.518776] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.518931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.519180] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.519415] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7916f888-d8f2-4e54-bfb1-3e51260612c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.534686] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.534953] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.538281] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8ffd39c-31a3-4820-abdf-dd95e179b1c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.544693] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 906.544693] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fc7eec-9fdf-c46b-02aa-c7e023ac0383" [ 906.544693] env[69982]: _type = "Task" [ 906.544693] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.556526] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fc7eec-9fdf-c46b-02aa-c7e023ac0383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.679190] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8458a7b0-49a4-490e-8ad8-eb9444a2f0f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.687401] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7522902b-06fd-4dc5-ba5b-cae076c806cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.719851] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646a933a-a2a7-4929-97a2-adc166bd9ad7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.728597] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6e209f-b117-4a59-a3b8-f417a1327083 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.743306] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.057406] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fc7eec-9fdf-c46b-02aa-c7e023ac0383, 'name': SearchDatastore_Task, 'duration_secs': 0.011198} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.058228] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7443bf5-efbb-4c7f-86dd-ddd012647e4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.064107] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 907.064107] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b440c9-9e39-3bb6-2178-29127a46eb81" [ 907.064107] env[69982]: _type = "Task" [ 907.064107] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.072252] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b440c9-9e39-3bb6-2178-29127a46eb81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.246806] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.575869] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b440c9-9e39-3bb6-2178-29127a46eb81, 'name': SearchDatastore_Task, 'duration_secs': 0.023095} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.576207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.576493] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. 
{{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 907.576771] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12431e74-4cd1-417a-aed8-c77ea598fffc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.585432] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 907.585432] env[69982]: value = "task-3864793" [ 907.585432] env[69982]: _type = "Task" [ 907.585432] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.594902] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864793, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.752307] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 907.752654] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.718s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 907.752960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 50.120s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.754785] env[69982]: INFO nova.compute.claims [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 907.759036] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.759036] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 908.096675] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864793, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488905} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.097023] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. [ 908.098094] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b914166b-0c5d-44bb-b576-4f7ae93da191 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.126746] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.127071] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48fa6aee-2f18-4fab-ad52-48410b79ab07 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.145884] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 908.145884] env[69982]: value = "task-3864794" [ 908.145884] env[69982]: _type = "Task" [ 908.145884] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.155964] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864794, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.272289] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] There are 38 instances to clean {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 908.272555] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: c34355fa-3712-4338-942d-acdb2f8a91ee] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 908.657713] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864794, 'name': ReconfigVM_Task, 'duration_secs': 0.385632} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.658201] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfigured VM instance instance-0000003e to attach disk [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.658927] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29656167-4921-46e8-a1b7-8c397834fa53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.696027] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd7e098a-742d-4cfc-9111-0c629b0ad4d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.715693] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 908.715693] env[69982]: value = "task-3864795" [ 908.715693] env[69982]: _type = "Task" [ 908.715693] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.726643] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864795, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.780847] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a427dc30-7e0f-4313-a8e9-f76451e4a112] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.227660] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864795, 'name': ReconfigVM_Task, 'duration_secs': 0.203763} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.230499] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.231011] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3662837f-9b44-4737-b600-b0f18dc836b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.237752] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 909.237752] env[69982]: value = "task-3864796" [ 909.237752] env[69982]: _type = "Task" [ 909.237752] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.247361] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.285894] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: c563267f-7699-4bd1-83cf-59ecef500ac3] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.301239] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069cac86-2567-400d-b450-257169cd58a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.309602] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870e6bc2-d9ee-4cee-adfe-9d3fdd82ed7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.344241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f8a26e-b58f-4330-97d2-e3261b97680d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.352933] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf4b8c5-0ac5-41bd-955d-d47b739317a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.368383] env[69982]: DEBUG nova.compute.provider_tree [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.656659] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Getting lease state for 
https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.657671] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4997e7e3-c24f-40d4-862d-ae45ff4ffee9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.665364] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.665571] env[69982]: ERROR oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk due to incomplete transfer. [ 909.665826] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-b4bf0aa5-2774-4c4e-a448-1fc587a6edbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.684232] env[69982]: DEBUG oslo_vmware.rw_handles [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52732feb-96de-8774-225c-7972d89701fc/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 909.684520] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Uploaded image 5b8b1607-0ed9-475e-9c95-23428d81e909 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 909.687560] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 909.687933] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4f650de6-e1ed-420d-bc14-ce9b3eb32865 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.698923] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 909.698923] env[69982]: value = "task-3864797" [ 909.698923] env[69982]: _type = "Task" [ 909.698923] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.714101] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864797, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.747424] env[69982]: DEBUG oslo_vmware.api [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864796, 'name': PowerOnVM_Task, 'duration_secs': 0.464323} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.747732] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 909.750661] env[69982]: DEBUG nova.compute.manager [None req-c5d3cdb0-5844-4305-ac96-78a3c5560c52 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 909.751528] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91b43a0-b323-452c-abf6-0f51d74eb6eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.789497] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a70fa652-4726-4bc2-966f-530aaa79ba86] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 909.871622] env[69982]: DEBUG nova.scheduler.client.report [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.209881] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864797, 'name': Destroy_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.292367] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: fae97132-44b4-4df1-bd34-ba694ea7016a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.376482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.377046] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 910.380034] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.247s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.380034] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.383203] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.377s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.384816] env[69982]: INFO nova.compute.claims [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.413898] env[69982]: INFO nova.scheduler.client.report [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Deleted allocations for instance a8217447-bc22-4b84-925f-c3c09fb7228c [ 910.712789] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864797, 'name': Destroy_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.795670] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d5c23433-a0f3-4f0a-9c62-051d07dcd712] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 910.889447] env[69982]: DEBUG nova.compute.utils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 910.892979] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 910.892979] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 910.922938] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b6264bf-9cef-456c-8a5c-a2ddf2b3db57 tempest-VolumesAssistedSnapshotsTest-2093908278 tempest-VolumesAssistedSnapshotsTest-2093908278-project-member] Lock "a8217447-bc22-4b84-925f-c3c09fb7228c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 53.725s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.932470] env[69982]: DEBUG nova.policy [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493da3ee04094ba4ac17893d999ac99e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc588ded27b49d4826535649105aa88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.212238] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864797, 'name': Destroy_Task, 'duration_secs': 1.062029} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.212238] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Destroyed the VM [ 911.212318] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 911.212584] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-02010ed1-a004-4ca8-850e-eb71f50fc255 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.219809] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 911.219809] env[69982]: value = "task-3864798" [ 911.219809] env[69982]: _type = "Task" [ 911.219809] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.228555] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864798, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.299477] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d73153ad-9258-4c3c-9699-b6364408d631] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.319944] env[69982]: INFO nova.compute.manager [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Unrescuing [ 911.320299] env[69982]: DEBUG oslo_concurrency.lockutils [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.320476] env[69982]: DEBUG oslo_concurrency.lockutils [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquired lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.321921] env[69982]: DEBUG nova.network.neutron [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 911.396714] env[69982]: DEBUG 
nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 911.459294] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Successfully created port: b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 911.735057] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864798, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.804052] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 48162423-a117-437e-b171-9a40c7c6f49b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 911.985027] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202a1367-8c2f-48ae-af05-35d368cf1b34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.996589] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06934801-7426-4812-af06-49eade777e78 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.034111] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf79768-83da-490a-808c-961c1d174dcc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.043045] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42ee4bb-7e39-4794-88a7-5df2336be0dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.059346] env[69982]: DEBUG nova.compute.provider_tree [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.138549] env[69982]: DEBUG nova.network.neutron [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [{"id": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "address": "fa:16:3e:1d:77:ed", "network": {"id": "29eeafe7-2870-42f6-837a-535dc38c0aa4", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-749573937-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "6642ce24dd0040fea9a3f89b22343330", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dba18786-598d-4e06-96db-b3dc1717530f", "external-id": "nsx-vlan-transportzone-741", "segmentation_id": 741, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b1bc572-6c", "ovs_interfaceid": "2b1bc572-6c6d-4ce7-8f25-cffe67034c33", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.230650] env[69982]: DEBUG oslo_vmware.api [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864798, 'name': RemoveSnapshot_Task, 'duration_secs': 0.527156} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.230941] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 912.231226] env[69982]: INFO nova.compute.manager [None req-861248ae-c22d-4050-ae0f-848106d1a794 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Took 15.86 seconds to snapshot the instance on the hypervisor. [ 912.307898] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 24641406-5292-4497-b34f-9af0dcdc58d7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 912.415261] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 912.442571] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 912.442820] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.442975] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 912.443260] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.443398] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 912.443543] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 912.443754] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 912.443915] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 912.444096] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Got 1 possible 
topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 912.444263] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 912.444437] env[69982]: DEBUG nova.virt.hardware [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 912.445355] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f416b35b-db28-4ada-9734-30e69fb726a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.453729] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6db821-e10e-4384-b6f2-44191c6659d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.563910] env[69982]: DEBUG nova.scheduler.client.report [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.641533] env[69982]: DEBUG oslo_concurrency.lockutils [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Releasing lock "refresh_cache-a61e3d25-9064-4f18-b7f1-0045b705571a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.642015] env[69982]: DEBUG nova.objects.instance [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lazy-loading 'flavor' on Instance uuid a61e3d25-9064-4f18-b7f1-0045b705571a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 912.812221] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: bd242bac-cd36-4fff-9325-fc14d5ceb566] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.072134] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.688s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.072134] 
env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 913.075668] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.804s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.075897] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.077999] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.081s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.078208] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.080201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.124s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.081701] env[69982]: INFO nova.compute.claims [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.106177] env[69982]: INFO nova.scheduler.client.report [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Deleted allocations for instance 9dcaa045-83c6-4e74-881d-a85a1991dbe3 [ 913.115131] env[69982]: INFO nova.scheduler.client.report [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleted allocations for instance 930c8740-5ad1-4491-8dd6-1a568eaa6f62 [ 913.148633] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d5330f-69b8-4a04-ad2b-d08ca3a105ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.173582] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 913.173954] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a704be02-02fa-4124-83c7-591c5438d706 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.181300] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 913.181300] env[69982]: value = "task-3864799" [ 913.181300] env[69982]: _type = "Task" [ 913.181300] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.191328] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.267368] env[69982]: DEBUG nova.compute.manager [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Received event network-vif-plugged-b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 913.267614] env[69982]: DEBUG oslo_concurrency.lockutils [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] Acquiring lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.267795] env[69982]: DEBUG oslo_concurrency.lockutils [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.268024] env[69982]: DEBUG oslo_concurrency.lockutils [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.268204] env[69982]: DEBUG nova.compute.manager [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] No waiting events found dispatching network-vif-plugged-b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 913.268451] env[69982]: WARNING nova.compute.manager [req-42a9b8bf-3c4c-4b62-b2cf-61981c6404a3 req-28a0f57e-e906-46d2-9851-2b5ce2b06355 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Received unexpected event network-vif-plugged-b67025a3-947b-4ccc-8d88-60d00242778d for instance with vm_state building and task_state spawning. [ 913.275029] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Successfully updated port: b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.316845] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ad0c405f-48c8-4726-8e95-eb83a6e158fe] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 913.586314] env[69982]: DEBUG nova.compute.utils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 913.591847] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 913.592154] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 913.614349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0d174355-a75b-4073-9490-03a11cafc4f0 tempest-ServerPasswordTestJSON-1651974246 tempest-ServerPasswordTestJSON-1651974246-project-member] Lock "9dcaa045-83c6-4e74-881d-a85a1991dbe3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.946s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.619786] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60a7ab97-cfec-4413-b906-ba2537b5a5cb tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "930c8740-5ad1-4491-8dd6-1a568eaa6f62" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.767s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.637687] env[69982]: DEBUG nova.policy [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '301e104a68664770b89548343a23f90a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9ecd757af6ae4cb5b19c2b3517d12a9b', 
'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 913.693107] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864799, 'name': PowerOffVM_Task, 'duration_secs': 0.220135} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.693400] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.700250] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfiguring VM instance instance-0000003e to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 913.701607] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5d6d2eb-69e4-4119-8214-9fc6d12b4600 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.722168] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 913.722168] env[69982]: value = "task-3864800" [ 913.722168] env[69982]: _type = "Task" [ 913.722168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.731507] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864800, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.776336] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.776727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.776942] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 913.824526] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 365b8207-f49b-4ee7-af6f-9d271eed2e38] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.037958] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Successfully created port: a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 914.101414] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 914.235070] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864800, 'name': ReconfigVM_Task, 'duration_secs': 0.250895} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.235365] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Reconfigured VM instance instance-0000003e to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 914.235630] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.235816] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ffc0997-effc-4dfa-a4ce-4db690819e26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.243811] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 914.243811] env[69982]: value = "task-3864801" [ 914.243811] env[69982]: _type = "Task" [ 914.243811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.250711] env[69982]: DEBUG nova.compute.manager [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.252721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf00e57-b64a-44ac-97cc-e124c46b2762 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.259013] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864801, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.328992] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 25957956-0d50-4b4f-8e5c-f55a1e182235] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.358920] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 914.665891] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e1b9a9-becb-4051-8c7b-ffcbfac51e7d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.674540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f9844a4-ce01-4f53-abc7-15a63860096b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.714385] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdf0b6e-c879-453d-931a-097776e9f4f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.723850] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f331bc-f6e3-432a-94cb-45989f62ee6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.742012] env[69982]: DEBUG nova.compute.provider_tree [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.754269] env[69982]: DEBUG oslo_vmware.api [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864801, 'name': PowerOnVM_Task, 'duration_secs': 0.36453} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.755828] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.759165] env[69982]: DEBUG nova.compute.manager [None req-df06b8ba-43db-4763-9b3b-7fd34cf8c40c tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.760449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60560ada-d0d6-4c97-9994-5099d23a5cde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.774577] env[69982]: INFO nova.compute.manager [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] instance snapshotting [ 914.780047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2270f61-7cc5-47e4-85fd-4c538f8aa678 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.802573] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-948f0c00-9b21-47e3-b76a-78d650c57011 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.836282] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: bb0e9037-42d5-43ae-b0e6-f08c9a2e6ec7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 914.972388] env[69982]: DEBUG nova.network.neutron [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Updating instance_info_cache with network_info: [{"id": "b67025a3-947b-4ccc-8d88-60d00242778d", "address": "fa:16:3e:48:5a:b1", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67025a3-94", "ovs_interfaceid": "b67025a3-947b-4ccc-8d88-60d00242778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.122477] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.154757] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 915.155030] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.155222] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.155483] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.155643] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.155787] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 915.155992] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.156158] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.156317] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.156470] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.156637] env[69982]: DEBUG nova.virt.hardware [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.157560] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76afd3cb-03fd-4955-b7c3-41ede7b027f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.166351] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c06169f-1877-4a22-9a4a-1f17de8d377d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.248129] env[69982]: DEBUG nova.scheduler.client.report [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.308172] env[69982]: DEBUG nova.compute.manager [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Received event network-changed-b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 915.308387] env[69982]: DEBUG nova.compute.manager [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Refreshing instance network info cache due to event 
network-changed-b67025a3-947b-4ccc-8d88-60d00242778d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 915.308598] env[69982]: DEBUG oslo_concurrency.lockutils [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] Acquiring lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.314602] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 915.315136] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bb99e747-4fcd-4e63-b07b-65216fbec475 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.084143] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 3e109fff-94bd-41a9-bc43-373143b7fda5] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.086062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.087029] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Instance network_info: |[{"id": "b67025a3-947b-4ccc-8d88-60d00242778d", "address": "fa:16:3e:48:5a:b1", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67025a3-94", "ovs_interfaceid": "b67025a3-947b-4ccc-8d88-60d00242778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.087029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.007s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.087505] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.093210] env[69982]: DEBUG oslo_concurrency.lockutils [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] Acquired lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.093376] env[69982]: DEBUG nova.network.neutron [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Refreshing network info cache for port b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 916.094771] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:5a:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31a7f15-a808-4199-9071-31fd05e316ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b67025a3-947b-4ccc-8d88-60d00242778d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.101794] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.102057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.452s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.103608] env[69982]: INFO nova.compute.claims [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.108895] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.109464] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 916.109464] env[69982]: value = "task-3864802" [ 916.109464] env[69982]: _type = "Task" [ 916.109464] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.109647] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21e20c18-9b95-4e8a-aa26-3cdc2657e7c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.138036] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864802, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.138036] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.138036] env[69982]: value = "task-3864803" [ 916.138036] env[69982]: _type = "Task" [ 916.138036] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.148068] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864803, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.194646] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Successfully updated port: a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 916.596031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a61e3d25-9064-4f18-b7f1-0045b705571a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.596031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.596031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.596031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.596285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.597230] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5ba60eb7-ee6f-47e2-b6ca-b54817dab371] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 916.600278] env[69982]: DEBUG nova.compute.utils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.602393] env[69982]: INFO nova.compute.manager [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 
tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Terminating instance [ 916.608052] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.608052] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.637916] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864802, 'name': CreateSnapshot_Task, 'duration_secs': 0.495712} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.638434] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 916.642451] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20675b57-2ad5-4320-9778-d800f3ba4140 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.659174] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864803, 'name': CreateVM_Task, 'duration_secs': 0.305234} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.659566] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 916.660266] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.660472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.660743] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 916.661022] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3de137d-4ab1-4be4-aefe-c87d950da13c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.666607] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 916.666607] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529ebd0a-2f2a-247f-ef37-f930ff6d432e" [ 916.666607] env[69982]: _type = "Task" [ 916.666607] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.676820] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529ebd0a-2f2a-247f-ef37-f930ff6d432e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.697334] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.697554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquired lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.697714] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 916.742323] env[69982]: DEBUG nova.policy [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce065cecaa05441c857528b8025b4a9d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29c3fc30dcc447f0ae6708af4ba919ac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 917.061477] env[69982]: DEBUG nova.network.neutron [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Updated VIF entry in instance network info cache for port b67025a3-947b-4ccc-8d88-60d00242778d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 917.061477] env[69982]: DEBUG nova.network.neutron [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Updating instance_info_cache with network_info: [{"id": "b67025a3-947b-4ccc-8d88-60d00242778d", "address": "fa:16:3e:48:5a:b1", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb67025a3-94", "ovs_interfaceid": "b67025a3-947b-4ccc-8d88-60d00242778d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.105726] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5743a020-0c09-45ec-aca4-5ce367cc201a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.108911] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.111567] env[69982]: DEBUG nova.compute.manager [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 917.115375] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.115375] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e9b0d0e-63dd-4c7e-b7dd-a6c60bf0f52e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.124096] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.124363] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-154a6b33-4104-43bf-9af2-a99655e2f252 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.132812] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 917.132812] env[69982]: value = "task-3864804" [ 917.132812] env[69982]: _type = "Task" [ 917.132812] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.145456] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864804, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.169042] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 917.170701] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Successfully created port: 3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.172771] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-57f657c6-9b96-4aa8-896c-73f984509d7d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.189933] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529ebd0a-2f2a-247f-ef37-f930ff6d432e, 'name': SearchDatastore_Task, 'duration_secs': 0.011025} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.194389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.194683] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.194932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.195115] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.195428] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.195656] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 917.195656] env[69982]: value = "task-3864805" [ 917.195656] env[69982]: _type = "Task" [ 917.195656] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.196130] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83f9e466-5754-420d-b9c8-d6acd84cb624 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.211133] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864805, 'name': CloneVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.219027] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.219027] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.219027] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31ddd89b-f511-4fae-84cc-aaed61d43b1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.223739] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 917.223739] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ebee2-5476-1f7a-378a-bfdc03646074" [ 917.223739] env[69982]: _type = "Task" [ 917.223739] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.237454] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ebee2-5476-1f7a-378a-bfdc03646074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.260445] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 917.509118] env[69982]: DEBUG nova.network.neutron [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Updating instance_info_cache with network_info: [{"id": "a4b3377a-104c-41a0-968e-96828e5b1313", "address": "fa:16:3e:84:f2:68", "network": {"id": "3cb93df6-fb5d-45d7-b7c1-3fa26c51fce0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-707964562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ecd757af6ae4cb5b19c2b3517d12a9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b3377a-10", "ovs_interfaceid": "a4b3377a-104c-41a0-968e-96828e5b1313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.532447] env[69982]: DEBUG nova.compute.manager [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Received event network-vif-plugged-a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.532674] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Acquiring lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.533668] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.534023] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.534150] env[69982]: DEBUG nova.compute.manager [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] No waiting events found dispatching network-vif-plugged-a4b3377a-104c-41a0-968e-96828e5b1313 
{{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 917.534324] env[69982]: WARNING nova.compute.manager [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Received unexpected event network-vif-plugged-a4b3377a-104c-41a0-968e-96828e5b1313 for instance with vm_state building and task_state spawning. [ 917.534483] env[69982]: DEBUG nova.compute.manager [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Received event network-changed-a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.534635] env[69982]: DEBUG nova.compute.manager [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Refreshing instance network info cache due to event network-changed-a4b3377a-104c-41a0-968e-96828e5b1313. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 917.534845] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Acquiring lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.565076] env[69982]: DEBUG oslo_concurrency.lockutils [req-25dc290e-9724-4724-b0c2-26fa34c05c4b req-a80dc99a-8f15-4f13-8d7c-1638fb930e64 service nova] Releasing lock "refresh_cache-69103bad-cb3f-4cd1-bfa1-c19b10395674" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.613124] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: f9124657-d1c5-4a93-9d4a-3b06ca60ec63] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 917.645966] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864804, 'name': PowerOffVM_Task, 'duration_secs': 0.298613} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.649536] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.649786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.653153] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c6196e0-fbb0-4f0d-92ed-f2cae92aee35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.714623] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864805, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.735588] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.735588] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.735588] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleting the datastore file [datastore1] a61e3d25-9064-4f18-b7f1-0045b705571a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.740300] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7de60dc-7a6e-4bdc-bcbb-8665beba683c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.743349] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522ebee2-5476-1f7a-378a-bfdc03646074, 'name': SearchDatastore_Task, 'duration_secs': 0.015159} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.749391] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fad88a7-b2d0-4e51-803a-ab2affe08ab7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.754973] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 917.754973] env[69982]: value = "task-3864807" [ 917.754973] env[69982]: _type = "Task" [ 917.754973] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.761382] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 917.761382] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f93d4-c6fc-f632-ec89-9eb88079eeab" [ 917.761382] env[69982]: _type = "Task" [ 917.761382] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.772714] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.780071] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520f93d4-c6fc-f632-ec89-9eb88079eeab, 'name': SearchDatastore_Task, 'duration_secs': 0.011757} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.780363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.780888] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 69103bad-cb3f-4cd1-bfa1-c19b10395674/69103bad-cb3f-4cd1-bfa1-c19b10395674.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.781319] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6832a02b-2f41-446c-8592-c8f3627c438d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.790663] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00544741-13f9-48f5-87f4-13e657da5a86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.795977] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 917.795977] env[69982]: value = "task-3864808" [ 917.795977] env[69982]: _type = "Task" [ 917.795977] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.803178] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7771b06a-0801-4734-890c-a409b3a355fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.810683] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864808, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.843594] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5852bb1-5ebf-48ca-a050-9413138bce77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.852789] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e360f3-aa5f-4173-9239-a325f931a53f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.870100] env[69982]: DEBUG nova.compute.provider_tree [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.011683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Releasing lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.012118] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Instance network_info: |[{"id": "a4b3377a-104c-41a0-968e-96828e5b1313", "address": "fa:16:3e:84:f2:68", "network": {"id": "3cb93df6-fb5d-45d7-b7c1-3fa26c51fce0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-707964562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ecd757af6ae4cb5b19c2b3517d12a9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b3377a-10", "ovs_interfaceid": "a4b3377a-104c-41a0-968e-96828e5b1313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 918.012400] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Acquired lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 918.012714] env[69982]: DEBUG nova.network.neutron [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Refreshing network info 
cache for port a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 918.016073] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:f2:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4b3377a-104c-41a0-968e-96828e5b1313', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 918.023113] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Creating folder: Project (9ecd757af6ae4cb5b19c2b3517d12a9b). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.024185] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0208f7e8-7162-44dc-a0c6-d4c5b2107b6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.042378] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Created folder: Project (9ecd757af6ae4cb5b19c2b3517d12a9b) in parent group-v767796. [ 918.042378] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Creating folder: Instances. Parent ref: group-v767984. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 918.042378] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb366d3d-533e-45fd-9dc5-4771b8384400 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.056031] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Created folder: Instances in parent group-v767984. [ 918.056031] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.056031] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 918.056031] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bbae005-fc92-4f81-8a17-a9d29b01111a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.076747] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 918.076747] env[69982]: value = "task-3864811" [ 918.076747] env[69982]: _type = "Task" [ 918.076747] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.085768] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864811, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.120703] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 67f59d53-c61b-48ad-b55d-710595e9dae3] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.124934] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.158166] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.158651] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.159514] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.159881] env[69982]: DEBUG 
nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.160158] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.160526] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.160902] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.161374] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.161452] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.161662] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.161882] env[69982]: DEBUG nova.virt.hardware [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.162845] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c640ed7-ee41-4632-8ea4-0d02af4b8bd4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.175562] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89447da7-fe4f-463b-9c7f-7ede8d3f719f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.216269] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864805, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.267926] env[69982]: DEBUG oslo_vmware.api [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.297811} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.268875] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.269101] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.269296] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.269474] env[69982]: INFO nova.compute.manager [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 918.269743] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.269952] env[69982]: DEBUG nova.compute.manager [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.270069] env[69982]: DEBUG nova.network.neutron [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 918.310554] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864808, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.373880] env[69982]: DEBUG nova.scheduler.client.report [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.599025] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864811, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.624430] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ebd9e006-a591-44f7-867c-041731b9d45a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 918.715632] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864805, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.801802] env[69982]: DEBUG nova.compute.manager [req-4eac2f4b-4b0c-44fd-b97e-886174ccb93b req-a6dd174e-1061-45fc-9967-cb8beac9637a service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Received event network-vif-deleted-2b1bc572-6c6d-4ce7-8f25-cffe67034c33 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.801802] env[69982]: INFO nova.compute.manager [req-4eac2f4b-4b0c-44fd-b97e-886174ccb93b req-a6dd174e-1061-45fc-9967-cb8beac9637a service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Neutron deleted interface 2b1bc572-6c6d-4ce7-8f25-cffe67034c33; detaching it from the instance and deleting it from the info cache [ 918.801802] env[69982]: DEBUG nova.network.neutron [req-4eac2f4b-4b0c-44fd-b97e-886174ccb93b req-a6dd174e-1061-45fc-9967-cb8beac9637a service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.822907] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708824} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.824384] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 69103bad-cb3f-4cd1-bfa1-c19b10395674/69103bad-cb3f-4cd1-bfa1-c19b10395674.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.824384] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.824620] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72dbb19f-3193-47da-b130-f7dc39fd5f4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.833890] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 918.833890] env[69982]: value = "task-3864812" [ 918.833890] env[69982]: _type = "Task" [ 918.833890] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.844684] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864812, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.880244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.778s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.881037] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.886030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.112s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.886030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.889052] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.956s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.891144] env[69982]: INFO nova.compute.claims [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 919.001760] env[69982]: DEBUG nova.network.neutron [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Updated VIF entry in instance network info cache for port a4b3377a-104c-41a0-968e-96828e5b1313. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 919.002203] env[69982]: DEBUG nova.network.neutron [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Updating instance_info_cache with network_info: [{"id": "a4b3377a-104c-41a0-968e-96828e5b1313", "address": "fa:16:3e:84:f2:68", "network": {"id": "3cb93df6-fb5d-45d7-b7c1-3fa26c51fce0", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-707964562-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9ecd757af6ae4cb5b19c2b3517d12a9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b3377a-10", "ovs_interfaceid": "a4b3377a-104c-41a0-968e-96828e5b1313", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.090680] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864811, 'name': CreateVM_Task, 'duration_secs': 0.637573} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.090904] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 919.091741] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.091958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.092411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 919.092719] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b37b4ae8-231e-422b-a28d-6dc2faa35d3c {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.099295] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 919.099295] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200a5d5-9ee8-aabd-c942-0f2093261714" [ 919.099295] env[69982]: _type = "Task" [ 919.099295] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.109345] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200a5d5-9ee8-aabd-c942-0f2093261714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.127643] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: fb6d0f81-0eb1-45aa-a3ad-d3958de582c0] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.172237] env[69982]: DEBUG nova.network.neutron [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.213279] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864805, 'name': CloneVM_Task, 'duration_secs': 1.909905} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.213575] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Created linked-clone VM from snapshot [ 919.214409] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ff73f0-1bf7-463e-88eb-fd5efe55b583 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.224073] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Uploading image a60d2423-c997-4c2a-935c-cc8146777620 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 919.258508] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 919.258508] env[69982]: value = "vm-767983" [ 919.258508] env[69982]: _type = "VirtualMachine" [ 919.258508] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 919.258809] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-23856707-85d0-43a3-b11c-2ba95102e724 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.270878] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease: (returnval){ [ 919.270878] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ade800-551a-3945-fd14-d329eec87c1d" [ 919.270878] env[69982]: _type = "HttpNfcLease" [ 919.270878] env[69982]: } obtained for exporting VM: (result){ [ 919.270878] env[69982]: value = "vm-767983" [ 919.270878] env[69982]: _type = "VirtualMachine" [ 919.270878] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 919.271245] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the lease: (returnval){ [ 919.271245] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ade800-551a-3945-fd14-d329eec87c1d" [ 919.271245] env[69982]: _type = "HttpNfcLease" [ 919.271245] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 919.281106] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 919.281106] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ade800-551a-3945-fd14-d329eec87c1d" [ 919.281106] env[69982]: _type = "HttpNfcLease" [ 919.281106] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 919.307998] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa1e080e-290d-446a-827d-78808d64cce4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.320084] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11023222-0f6c-4833-a033-da30bca51551 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.345856] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864812, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092731} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.346228] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 919.347475] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f591e4b-0c13-4807-89da-4d984fecaa8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.368997] env[69982]: DEBUG nova.compute.manager [req-4eac2f4b-4b0c-44fd-b97e-886174ccb93b req-a6dd174e-1061-45fc-9967-cb8beac9637a service nova] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Detach interface failed, port_id=2b1bc572-6c6d-4ce7-8f25-cffe67034c33, reason: Instance a61e3d25-9064-4f18-b7f1-0045b705571a could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 919.389732] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 69103bad-cb3f-4cd1-bfa1-c19b10395674/69103bad-cb3f-4cd1-bfa1-c19b10395674.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 919.390081] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8993034b-be07-413f-be56-a165bfdf6d80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.410349] env[69982]: DEBUG nova.compute.utils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.413187] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d770bac9-953f-4d8e-a5a0-a234d3c8f87e tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 61.382s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.414235] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.414354] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.416788] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 35.599s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.417021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.417241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.417411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.419310] env[69982]: INFO nova.compute.manager [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Terminating instance [ 919.423739] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 919.423739] env[69982]: value = "task-3864814" [ 919.423739] env[69982]: _type = "Task" [ 919.423739] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.436134] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864814, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.470998] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Successfully updated port: 3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.495369] env[69982]: DEBUG nova.policy [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '093054e7b12b4e13b1b90d9961e0e202', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fe14238478147f8bab643b39a1bcb34', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.505560] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef3c9dd4-cc90-4b01-be5d-60cb5033ae8a req-2f3cd3a0-0740-411a-a3d8-da9f93e408d8 service nova] Releasing lock "refresh_cache-a3e3106d-b7df-49c8-9341-a843977aefe4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.569916] env[69982]: DEBUG nova.compute.manager [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Received event network-vif-plugged-3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 919.570225] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Acquiring lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.570475] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.570655] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.570844] env[69982]: DEBUG nova.compute.manager [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] No waiting events found dispatching network-vif-plugged-3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.571051] env[69982]: 
WARNING nova.compute.manager [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Received unexpected event network-vif-plugged-3e3f4839-831a-4b7d-8026-14ba4c18d895 for instance with vm_state building and task_state spawning. [ 919.571230] env[69982]: DEBUG nova.compute.manager [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Received event network-changed-3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 919.571400] env[69982]: DEBUG nova.compute.manager [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Refreshing instance network info cache due to event network-changed-3e3f4839-831a-4b7d-8026-14ba4c18d895. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 919.571589] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Acquiring lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.571720] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Acquired lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.571871] env[69982]: DEBUG nova.network.neutron [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Refreshing network info cache for port 3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 919.611262] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5200a5d5-9ee8-aabd-c942-0f2093261714, 'name': SearchDatastore_Task, 'duration_secs': 0.011547} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.611585] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 919.611817] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 919.612070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.612231] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 919.613781] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 919.613781] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0fb8bd21-b76b-4401-99bb-da3c61e236ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.621671] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 919.621868] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 919.622701] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84201f0f-08f3-4dad-a1be-020090e024c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.629486] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 919.629486] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bbe6c8-c5ba-02e0-b368-34d984af7655" [ 919.629486] env[69982]: _type = "Task" [ 919.629486] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.633100] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9d1b0a5f-e096-4641-a077-f0949135efbb] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 919.640713] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bbe6c8-c5ba-02e0-b368-34d984af7655, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.675150] env[69982]: INFO nova.compute.manager [-] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Took 1.40 seconds to deallocate network for instance. [ 919.781413] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 919.781413] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ade800-551a-3945-fd14-d329eec87c1d" [ 919.781413] env[69982]: _type = "HttpNfcLease" [ 919.781413] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 919.781764] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 919.781764] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ade800-551a-3945-fd14-d329eec87c1d" [ 919.781764] env[69982]: _type = "HttpNfcLease" [ 919.781764] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 919.782495] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0382b6-f94b-4cd5-b302-f50b9f297ef5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.791229] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk from lease info. 
{{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 919.791484] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 919.884355] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de92c5ea-5d5e-4f31-b196-fcb7398bba23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.934390] env[69982]: DEBUG nova.compute.utils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.936182] env[69982]: DEBUG nova.compute.manager [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 919.936409] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.939826] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3de45df-ee4d-4911-9589-a7690f93596b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.955401] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a23f1e-8bf4-4543-be42-f35e442736cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.966591] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864814, 'name': ReconfigVM_Task, 'duration_secs': 0.325817} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.971101] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 69103bad-cb3f-4cd1-bfa1-c19b10395674/69103bad-cb3f-4cd1-bfa1-c19b10395674.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.972165] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d7378e1-4c73-4ce3-b9b6-ea31f08e24d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.974356] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 919.981438] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 919.981438] env[69982]: value = "task-3864815" [ 919.981438] env[69982]: _type = "Task" [ 919.981438] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.001211] env[69982]: WARNING nova.virt.vmwareapi.vmops [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 881cbfae-7630-45e0-a8ad-b2cd283689ea could not be found. [ 920.001458] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.001637] env[69982]: INFO nova.compute.manager [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Took 0.07 seconds to destroy the instance on the hypervisor. [ 920.001886] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 920.005174] env[69982]: DEBUG nova.compute.manager [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 920.005289] env[69982]: DEBUG nova.network.neutron [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 920.011315] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864815, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.139823] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 589419ea-c609-45bb-bde5-3b22d9ff111e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.141875] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bbe6c8-c5ba-02e0-b368-34d984af7655, 'name': SearchDatastore_Task, 'duration_secs': 0.015201} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.142772] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77c59486-f8c0-4e5c-a5f3-29ff7dc3d688 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.149190] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 920.149190] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52948970-7b92-f084-2a44-c1878582f486" [ 920.149190] env[69982]: _type = "Task" [ 920.149190] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.161290] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Successfully created port: ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.168682] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52948970-7b92-f084-2a44-c1878582f486, 'name': SearchDatastore_Task, 'duration_secs': 0.010551} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.172862] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 920.172862] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a3e3106d-b7df-49c8-9341-a843977aefe4/a3e3106d-b7df-49c8-9341-a843977aefe4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 920.172862] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48999d61-78ed-44ba-92dd-02dcf90d7b10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.172862] env[69982]: DEBUG nova.network.neutron [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 920.182158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.185650] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 920.185650] env[69982]: value = "task-3864816" [ 920.185650] env[69982]: _type = "Task" [ 920.185650] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.200918] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864816, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.439132] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 920.501138] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864815, 'name': Rename_Task, 'duration_secs': 0.157324} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.502341] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.502661] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4c89e226-4d0a-4ed5-8305-747c4aed3fc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.510681] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 920.510681] env[69982]: value = "task-3864817" [ 920.510681] env[69982]: _type = "Task" [ 920.510681] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.525286] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864817, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.618323] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3989418d-79bb-4122-83fb-3b3ac270645d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.628533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ae073f-6902-4302-82c1-f8a2073d1a03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.669180] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 78ba4fa9-4083-4204-a5b4-88cdcec6ca13] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 920.669993] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794513e5-729a-41a2-ac7f-273e5932cedf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.679509] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f73de07-92e5-4c9b-b748-e2d645176f2e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.685491] env[69982]: DEBUG nova.network.neutron [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 920.698698] env[69982]: DEBUG nova.compute.provider_tree [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.710038] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864816, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.817132] env[69982]: DEBUG nova.network.neutron [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.024892] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864817, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.174692] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6efb0df5-0435-424a-b4cc-1eaefdcf388d] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.191901] env[69982]: DEBUG oslo_concurrency.lockutils [req-b8fec186-090b-4cb6-aefb-0a529845a998 req-460de295-aab8-44d9-aec6-f1b4f4f295d4 service nova] Releasing lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.192631] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquired lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.192828] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 921.205633] env[69982]: DEBUG nova.scheduler.client.report [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
921.210124] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864816, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.320221] env[69982]: INFO nova.compute.manager [-] [instance: 881cbfae-7630-45e0-a8ad-b2cd283689ea] Took 1.31 seconds to deallocate network for instance. [ 921.452190] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 921.492796] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:05:10Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1518974210',id=20,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-301667166',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 921.494078] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 921.494078] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 921.494078] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 921.494078] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 921.494912] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 
tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 921.494912] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 921.494912] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 921.494912] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 921.494912] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 921.495122] env[69982]: DEBUG nova.virt.hardware [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 921.495879] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea69c8c2-6b53-4308-b128-107ab118813c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.506214] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f455d4bf-c10c-4dc4-bef0-d65073ae4a79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.530421] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864817, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.678618] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 049c7405-3daf-4064-8770-efbbf15c832e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 921.705654] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864816, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.459519} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.707140] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a3e3106d-b7df-49c8-9341-a843977aefe4/a3e3106d-b7df-49c8-9341-a843977aefe4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 921.707818] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 921.708578] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ddc4ad3c-1e59-4312-81f0-9145abba46fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.713981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.824s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.714754] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 921.720169] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.423s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.720463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.723471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.662s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.726029] env[69982]: INFO nova.compute.claims [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.748824] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 921.748824] env[69982]: value = "task-3864818" [ 921.748824] env[69982]: _type = "Task" [ 921.748824] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.761586] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864818, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.789642] env[69982]: INFO nova.scheduler.client.report [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Deleted allocations for instance 7af5a14d-f586-4746-9831-8be255581637 [ 921.791797] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 922.024316] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864817, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.081321] env[69982]: DEBUG nova.network.neutron [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Updating instance_info_cache with network_info: [{"id": "3e3f4839-831a-4b7d-8026-14ba4c18d895", "address": "fa:16:3e:0e:9f:9d", "network": {"id": "097d55a4-85da-4b0c-af82-dd4b58320076", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-452666446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29c3fc30dcc447f0ae6708af4ba919ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e3f4839-83", "ovs_interfaceid": "3e3f4839-831a-4b7d-8026-14ba4c18d895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.183656] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 2d554902-bf28-4ee2-b9d6-4219e54246fc] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.241925] env[69982]: DEBUG nova.compute.utils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 922.247167] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 922.248305] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 922.266982] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864818, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087537} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.266982] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 922.268122] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c9b0ba-a07d-4d84-a125-f804a52e9fe1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.293530] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] a3e3106d-b7df-49c8-9341-a843977aefe4/a3e3106d-b7df-49c8-9341-a843977aefe4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 922.294198] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63c4ca86-bf21-41e7-9ffe-9a6a8fc73631 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.318075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a91a1f21-01c7-4ea1-a6e2-d83e02fda5af tempest-ServersTestManualDisk-1785355582 tempest-ServersTestManualDisk-1785355582-project-member] Lock "7af5a14d-f586-4746-9831-8be255581637" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.367s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.326345] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 922.326345] env[69982]: value = "task-3864819" [ 922.326345] env[69982]: _type = "Task" [ 922.326345] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.337355] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864819, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.352212] env[69982]: DEBUG nova.policy [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ea62e43062d24ecfb42e9d6b7e5bb8a2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5102c0ae7ade4db1a99486f7632dbe3e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 922.358462] env[69982]: DEBUG oslo_concurrency.lockutils [None req-040851fc-0888-4526-8f98-d3d4da263519 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "881cbfae-7630-45e0-a8ad-b2cd283689ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.942s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.523775] env[69982]: DEBUG oslo_vmware.api [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864817, 'name': PowerOnVM_Task, 'duration_secs': 1.54742} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.524150] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.524486] env[69982]: INFO nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Took 10.11 seconds to spawn the instance on the hypervisor. 
[ 922.524717] env[69982]: DEBUG nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.525606] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94908343-56d7-475f-8f6f-5f05bba9aca8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.584779] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Releasing lock "refresh_cache-67613f71-a91e-4dae-8a6c-cd74c4821339" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.585443] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance network_info: |[{"id": "3e3f4839-831a-4b7d-8026-14ba4c18d895", "address": "fa:16:3e:0e:9f:9d", "network": {"id": "097d55a4-85da-4b0c-af82-dd4b58320076", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-452666446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29c3fc30dcc447f0ae6708af4ba919ac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4813d311-2016-4f6a-a4b8-7613ab624fba", "external-id": "nsx-vlan-transportzone-386", "segmentation_id": 386, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e3f4839-83", "ovs_interfaceid": "3e3f4839-831a-4b7d-8026-14ba4c18d895", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 922.585622] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0e:9f:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4813d311-2016-4f6a-a4b8-7613ab624fba', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e3f4839-831a-4b7d-8026-14ba4c18d895', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 922.593726] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Creating folder: Project (29c3fc30dcc447f0ae6708af4ba919ac). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.594399] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ce837af-2f27-4607-aa02-9f86697d10be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.607520] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Created folder: Project (29c3fc30dcc447f0ae6708af4ba919ac) in parent group-v767796. [ 922.607520] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Creating folder: Instances. Parent ref: group-v767987. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 922.607520] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbfcd6c8-9161-4201-be0d-c2857a092407 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.621022] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Created folder: Instances in parent group-v767987. [ 922.621382] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.621664] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 922.622012] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8efdc7f6-5907-4a2b-b6ef-53fe41c1bc2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.647815] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 922.647815] env[69982]: value = "task-3864822" [ 922.647815] env[69982]: _type = "Task" [ 922.647815] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.657374] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864822, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.688118] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4a502d1e-8a86-4ec7-b1d6-9d68c3a47f9b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 922.720223] env[69982]: DEBUG nova.compute.manager [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Received event network-vif-plugged-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 922.720504] env[69982]: DEBUG oslo_concurrency.lockutils [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] Acquiring lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.720807] env[69982]: DEBUG oslo_concurrency.lockutils [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.720962] env[69982]: DEBUG oslo_concurrency.lockutils [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.721398] env[69982]: DEBUG nova.compute.manager [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] No waiting events found dispatching network-vif-plugged-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 922.721879] env[69982]: WARNING nova.compute.manager [req-a1569d8d-8301-4d65-a66d-7a8d6ca8171f req-426397fb-1918-4c06-8778-aca2fe2ce348 service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Received unexpected event network-vif-plugged-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 for instance with vm_state building and task_state spawning. [ 922.758338] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 922.826014] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Successfully updated port: ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.846033] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864819, 'name': ReconfigVM_Task, 'duration_secs': 0.353293} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.846033] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Reconfigured VM instance instance-00000040 to attach disk [datastore2] a3e3106d-b7df-49c8-9341-a843977aefe4/a3e3106d-b7df-49c8-9341-a843977aefe4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 922.846256] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aeae39bf-a387-49c2-9bd8-515fd97a34c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.854862] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 922.854862] env[69982]: value = "task-3864823" [ 922.854862] env[69982]: _type = "Task" [ 922.854862] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.866027] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864823, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.986579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.987090] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.047398] env[69982]: INFO nova.compute.manager [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Took 65.44 seconds to build instance. [ 923.088226] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Successfully created port: 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.166425] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864822, 'name': CreateVM_Task, 'duration_secs': 0.466825} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.166425] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.166914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.167100] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.167433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 923.175238] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1256745-da8a-4eb5-99bb-0b80c80a0f93 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.180481] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 923.180481] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529db866-2081-9f79-be95-bb93d314ad36" [ 923.180481] env[69982]: _type = "Task" [ 923.180481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.189820] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529db866-2081-9f79-be95-bb93d314ad36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.193161] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5c9b6dc6-887e-477a-b902-135fe06cfbbd] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.335293] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.335293] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.335293] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 923.369082] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864823, 'name': Rename_Task, 'duration_secs': 0.170486} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.374532] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 923.377040] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efd92f2b-5afb-4653-9b6e-fa87334c27e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.387729] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 923.387729] env[69982]: value = "task-3864824" [ 923.387729] env[69982]: _type = "Task" [ 923.387729] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.404982] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864824, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.490895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85672150-bb43-4cb2-b485-40fd8637a5af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.497946] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cf1989-eb8f-4d1f-a8fe-c4b7cdab8f88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.544484] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801d470d-8a7f-4b98-ba60-74212b237b6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.554349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a616dfb7-99ce-4220-b58f-343b7c613fd7 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.966s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.557289] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9abff93-02c0-4fcd-a009-17aa93a7771b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.578367] env[69982]: DEBUG nova.compute.provider_tree [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.691759] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529db866-2081-9f79-be95-bb93d314ad36, 'name': SearchDatastore_Task, 'duration_secs': 0.024205} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.692309] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.692391] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 923.692792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.692792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.692948] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 923.693222] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c3aae03-dc91-4beb-b36a-7b2cf83d12fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.696191] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: bba73604-c54f-4643-9e4c-326b643b3d51] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 923.711373] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 923.711373] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 923.711373] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-359e284b-0527-4bba-8255-48f82cb339b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.718213] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 923.718213] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52abf359-2c39-432c-f33b-0903e2df3596" [ 923.718213] env[69982]: _type = "Task" [ 923.718213] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.731915] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52abf359-2c39-432c-f33b-0903e2df3596, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.772317] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 923.888928] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 923.903328] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864824, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.054596] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 924.054866] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.055112] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 924.055364] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.055548] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 924.055728] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 924.055979] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 924.056193] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 924.056453] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 924.056692] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 924.056909] env[69982]: DEBUG nova.virt.hardware [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 924.061136] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b5db51-06a8-4187-be3c-93f322fd7279 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.066467] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 924.080030] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c563e2-9c3a-4421-8f17-3be2f5a03137 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.086216] env[69982]: DEBUG nova.scheduler.client.report [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.143786] env[69982]: DEBUG nova.network.neutron [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updating instance_info_cache with network_info: [{"id": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "address": "fa:16:3e:45:c0:5c", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1c200e-18", "ovs_interfaceid": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.173354] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5100234f-ea02-40bf-b883-fa9a159c7637" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.173409] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.202327] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 303c7ee1-8d26-460b-aab9-d55c71cf8a73] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.230630] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52abf359-2c39-432c-f33b-0903e2df3596, 'name': SearchDatastore_Task, 'duration_secs': 0.017018} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.231438] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e801fc38-c5e1-4957-84ee-4945bcaae852 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.240941] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 924.240941] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ad2e37-92cc-c193-2cea-cd758f2b07b2" [ 924.240941] env[69982]: _type = "Task" [ 924.240941] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.251298] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ad2e37-92cc-c193-2cea-cd758f2b07b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.403035] env[69982]: DEBUG oslo_vmware.api [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864824, 'name': PowerOnVM_Task, 'duration_secs': 0.666497} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.405029] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 924.405029] env[69982]: INFO nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Took 9.28 seconds to spawn the instance on the hypervisor. [ 924.405029] env[69982]: DEBUG nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 924.405029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2f927f-37fd-44b8-b69f-14e3453d36b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.598629] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.603687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.880s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.604228] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 924.606840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.864s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.608060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.609207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.996s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.613141] env[69982]: INFO nova.compute.claims [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 924.636380] env[69982]: INFO nova.scheduler.client.report [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance 3f896859-5a4a-4a59-bee8-b116e291fbe7 [ 924.644985] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.645354] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Instance network_info: |[{"id": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "address": "fa:16:3e:45:c0:5c", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tapff1c200e-18", "ovs_interfaceid": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 924.645772] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:c0:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad72c645-a67d-4efd-b563-28e44077e68d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ff1c200e-1846-4f71-8cf1-a64fa7ea9a92', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.656292] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.656590] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.656858] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70124216-613c-49c4-ae78-43f52d22ff4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.683747] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.683747] env[69982]: value = "task-3864825" [ 924.683747] env[69982]: _type = "Task" [ 924.683747] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.694167] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864825, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.708262] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6de35617-22cf-4a32-8651-28ea67532b8f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 924.757534] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ad2e37-92cc-c193-2cea-cd758f2b07b2, 'name': SearchDatastore_Task, 'duration_secs': 0.024071} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.757649] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.758026] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 67613f71-a91e-4dae-8a6c-cd74c4821339/67613f71-a91e-4dae-8a6c-cd74c4821339.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.758311] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0829f1c-0f60-4ada-8ef0-04e5238ed14a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.767220] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 924.767220] env[69982]: value = "task-3864826" [ 924.767220] env[69982]: _type = "Task" [ 924.767220] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.779096] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864826, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.853369] env[69982]: DEBUG nova.compute.manager [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Received event network-changed-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 924.854708] env[69982]: DEBUG nova.compute.manager [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Refreshing instance network info cache due to event network-changed-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 924.854708] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Acquiring lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.854708] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Acquired lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.854708] env[69982]: DEBUG nova.network.neutron [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Refreshing network info cache for port ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 924.907130] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Successfully updated port: 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 924.924774] env[69982]: INFO nova.compute.manager [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Took 62.94 seconds to build instance. [ 925.117150] env[69982]: DEBUG nova.compute.utils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 925.119362] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 925.123033] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 925.125464] env[69982]: DEBUG nova.compute.manager [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 925.128776] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f105f6-6bfa-4a2e-b524-4f1f0a6b9bd7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.145395] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2bc9d60d-88aa-48f2-89a0-6087232c73e6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "3f896859-5a4a-4a59-bee8-b116e291fbe7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.961s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.194039] env[69982]: DEBUG nova.policy [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '114e89c3714d4ce39f4947a0aa567aba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5ba58648e534c55953c89a4eae7caf4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 925.203184] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864825, 'name': CreateVM_Task, 'duration_secs': 0.433854} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.203520] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.204121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.204446] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.204794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.205112] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7117506-4680-4f14-b5aa-ad1356ebc0ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.211950] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 925.211950] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528963d0-8f28-3598-9099-550776e50e81" [ 925.211950] env[69982]: _type = "Task" [ 925.211950] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.212497] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 743a4a52-ce35-4ec1-9286-e0c470e87186] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 925.232712] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528963d0-8f28-3598-9099-550776e50e81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.285543] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864826, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.411319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.411319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.411319] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 925.430890] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db710883-6348-4fbe-ad50-740f4a54fdc1 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.360s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.627758] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 925.648106] env[69982]: INFO nova.compute.manager [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] instance snapshotting [ 925.650785] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097af529-029f-4b57-9a84-0a4e4a62e522 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.686253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2955a5af-daa9-4fbf-9171-857b339b0750 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.724345] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5bfdb2dc-c4b7-4c1f-94de-3505ac1550f8] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 925.726580] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528963d0-8f28-3598-9099-550776e50e81, 'name': SearchDatastore_Task, 'duration_secs': 0.031481} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.729955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.730245] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.730793] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.730793] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.730898] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 
tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.731359] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8cc3767-ad50-46ad-aaaa-091f23ade3cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.745213] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.745213] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.750340] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6328dcfe-7c24-498a-b8a0-bac231fb7c4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.757565] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 925.757565] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb47f4-e11f-baf7-ee9c-fc05ead7b7c2" [ 925.757565] env[69982]: _type = "Task" [ 925.757565] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.767867] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb47f4-e11f-baf7-ee9c-fc05ead7b7c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.780751] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864826, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63947} completed successfully. 
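Entries of the form "Waiting for the task: (returnval){ value = ..., _type = Task } to complete" followed by "progress is N%" lines are oslo.vmware's task polling. A rough sketch of that loop, assuming a caller-supplied get_task_info accessor; this is the observable behaviour, not the real vSphere binding.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        # Poll until the task reaches a terminal state, reporting progress
        # the same way the "progress is N%" lines above do.
        while True:
            info = get_task_info(task_ref)   # hypothetical accessor
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(getattr(info, "error", "unknown error"))
            print(f"Task {task_ref}: progress is {info.progress or 0}%")
            time.sleep(interval)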
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.781173] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 67613f71-a91e-4dae-8a6c-cd74c4821339/67613f71-a91e-4dae-8a6c-cd74c4821339.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 925.781353] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 925.781642] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dd5ee142-8f7b-4c2a-a834-f21013b1a275 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.788951] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 925.788951] env[69982]: value = "task-3864827" [ 925.788951] env[69982]: _type = "Task" [ 925.788951] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.802918] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864827, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.933800] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 925.951691] env[69982]: DEBUG nova.network.neutron [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updated VIF entry in instance network info cache for port ff1c200e-1846-4f71-8cf1-a64fa7ea9a92. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 925.952057] env[69982]: DEBUG nova.network.neutron [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updating instance_info_cache with network_info: [{"id": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "address": "fa:16:3e:45:c0:5c", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1c200e-18", "ovs_interfaceid": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.958482] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Successfully created port: c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.983856] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 926.200081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 926.200081] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d9ecb304-7065-4a1a-9d59-721c43b9fb1f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.214681] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 926.214681] env[69982]: value = "task-3864828" [ 926.214681] env[69982]: _type = "Task" [ 926.214681] env[69982]: } to complete. 
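The instance_info_cache update above embeds the full network_info entry for port ff1c200e-1846-4f71-8cf1-a64fa7ea9a92. A small sketch that pulls the commonly used fields out of such an entry; the dict layout is copied from the log line, the helper itself is illustrative.

    def summarize_vif(vif):
        # vif is one element of the network_info list shown above.
        subnets = vif["network"]["subnets"]
        fixed_ips = [ip["address"] for subnet in subnets for ip in subnet["ips"]]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "fixed_ips": fixed_ips,
            "mtu": vif["network"]["meta"].get("mtu"),
            "segmentation_id": vif["details"].get("segmentation_id"),
            "devname": vif.get("devname"),
        }

    # Values taken from the cache entry above:
    vif = {
        "id": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92",
        "address": "fa:16:3e:45:c0:5c",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.7"}]}],
                    "meta": {"mtu": 8950}},
        "details": {"segmentation_id": 201},
        "devname": "tapff1c200e-18",
    }
    print(summarize_vif(vif))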
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.227585] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864828, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.230344] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: e82ae1bd-c31b-44ca-9608-9348b8eac8dc] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 926.271310] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb47f4-e11f-baf7-ee9c-fc05ead7b7c2, 'name': SearchDatastore_Task, 'duration_secs': 0.015352} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.275827] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19889fd2-e367-4f3c-8059-2fb1c088a742 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.282906] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 926.282906] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522813da-ec43-1f6b-45a0-9e71f5a6e7f9" [ 926.282906] env[69982]: _type = "Task" [ 926.282906] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.288941] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c833a84-fb97-461c-ad7a-7eeeabf0a753 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.302151] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522813da-ec43-1f6b-45a0-9e71f5a6e7f9, 'name': SearchDatastore_Task, 'duration_secs': 0.013539} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.305500] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb505b7a-1b58-4094-b1e2-ac9c70e78f13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.309204] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.309581] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ff2c680a-211a-44ad-b00d-1037f1fcb856.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.316452] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9c2e73e-d2f1-4126-9818-9b5e319bada3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.316452] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073042} completed successfully. 
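The req-afa79e65 entries trace the fetch-if-missing path for image a4e69d6f-...: create devstack-image-cache_base if needed (FileManager.MakeDirectory), probe the datastore for the cached VMDK (SearchDatastore_Task), copy it into the instance folder (CopyVirtualDisk_Task), then extend the root disk (ExtendVirtualDisk_Task). A high-level sketch of that ordering; the ds client and its methods are hypothetical, only the sequence is taken from the log.

    def prepare_root_disk(ds, image_id, instance_uuid, root_gb):
        # ds is a hypothetical datastore client; each call maps onto one of
        # the vSphere tasks named in the log entries above.
        cache_dir = "devstack-image-cache_base"
        cached = f"{cache_dir}/{image_id}/{image_id}.vmdk"
        target = f"{instance_uuid}/{instance_uuid}.vmdk"

        ds.make_directory(cache_dir)           # FileManager.MakeDirectory
        if not ds.exists(cached):              # HostDatastoreBrowser.SearchDatastore_Task
            ds.fetch_image(image_id, cached)   # only on a cache miss
        ds.copy_virtual_disk(cached, target)   # VirtualDiskManager.CopyVirtualDisk_Task
        ds.extend_virtual_disk(target, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task (KB)
        return target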
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.317019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 926.318238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee309a98-1b01-4300-89a5-7ef74d8a22be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.350541] env[69982]: DEBUG nova.network.neutron [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.353334] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bef2db-fb30-47ba-a824-2298ba922cbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.357017] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 926.357017] env[69982]: value = "task-3864829" [ 926.357017] env[69982]: _type = "Task" [ 926.357017] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.377158] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 67613f71-a91e-4dae-8a6c-cd74c4821339/67613f71-a91e-4dae-8a6c-cd74c4821339.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 926.382335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0ed58d7-523d-4df6-b44e-b5850e08357b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.404089] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6c522f-2038-45bb-960a-5f2edadc6caf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.416267] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.416685] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 926.416685] env[69982]: value = "task-3864830" [ 926.416685] env[69982]: _type = "Task" [ 926.416685] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.432445] env[69982]: DEBUG nova.compute.provider_tree [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.444429] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864830, 'name': ReconfigVM_Task} progress is 14%. 
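The "Reconfiguring VM instance instance-00000041 to attach disk ... with type sparse" entry corresponds to building a disk device change and submitting VirtualMachine.ReconfigVM_Task. The sketch below only mimics the shape of such a config spec with plain dicts; the real driver constructs SOAP objects through the vSphere bindings, and every field name here should be read as an approximation.

    def build_attach_disk_spec(vmdk_path, disk_type="sparse",
                               controller_key=1000, unit_number=0):
        # Schematic device change for attaching an existing VMDK, e.g.
        # "[datastore2] 67613f71-.../67613f71-....vmdk" from the log above.
        return {
            "deviceChange": [{
                "operation": "add",
                "device": {
                    "backing": {
                        "fileName": vmdk_path,
                        "diskMode": "persistent",
                        "thinProvisioned": disk_type == "thin",
                    },
                    "controllerKey": controller_key,
                    "unitNumber": unit_number,
                },
            }],
        }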
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.455031] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Releasing lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.455210] env[69982]: DEBUG nova.compute.manager [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-vif-plugged-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.455325] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.455594] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.455773] env[69982]: DEBUG oslo_concurrency.lockutils [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.455971] env[69982]: DEBUG nova.compute.manager [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] No waiting events found dispatching network-vif-plugged-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.456166] env[69982]: WARNING nova.compute.manager [req-63afec60-da98-456a-8db9-ddcd1a634eda req-bc305c9e-ae52-41ad-8ea4-8822e32c259a service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received unexpected event network-vif-plugged-4e8435e5-b58d-4662-bc23-35592b41251f for instance with vm_state building and task_state spawning. [ 926.474511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.644507] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 926.681021] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 926.682100] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.682100] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 926.682100] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.682100] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 926.682100] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 926.682331] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 926.682550] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 926.682671] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 
tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 926.682814] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 926.683190] env[69982]: DEBUG nova.virt.hardware [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 926.683987] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8913e2b9-c81b-4e5b-8a0f-403612f33e11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.693815] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9cb6e9-4e14-4954-a57a-efad97f048cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.729916] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864828, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.738075] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 1bdb1577-cc35-4839-8992-ae3b4ab87eb2] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 926.868582] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.868955] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Instance network_info: |[{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 926.870242] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:09:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39cd75b0-9ec7-48ed-b57f-34da0c573a60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e8435e5-b58d-4662-bc23-35592b41251f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.880780] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 926.884328] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.884733] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a49e4d7-2fb1-4fd8-a17f-14fd8adf2c68 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.911133] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528281} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.915128] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ff2c680a-211a-44ad-b00d-1037f1fcb856.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.915456] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.915742] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.915742] env[69982]: value = "task-3864831" [ 926.915742] env[69982]: _type = "Task" [ 926.915742] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.915939] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb22ad09-7236-44b7-a835-9399b4779784 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.935962] env[69982]: DEBUG nova.scheduler.client.report [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.939651] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864831, 'name': CreateVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.948023] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 926.948023] env[69982]: value = "task-3864832" [ 926.948023] env[69982]: _type = "Task" [ 926.948023] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.948023] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864830, 'name': ReconfigVM_Task} progress is 14%. 
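The scheduler report-client entry above prints the full inventory for provider 206a5498-... and notes that nothing changed, so no update needs to be sent to placement. A small sketch of that compare-before-update check; the resource-class dicts are the ones from the log, while update_fn stands in for the placement call.

    def sync_inventory(provider_uuid, current, reported, update_fn):
        # current/reported: resource class -> {total, reserved, min_unit,
        # max_unit, step_size, allocation_ratio}, as in the log entry above.
        if current == reported:
            print(f"Inventory has not changed for provider {provider_uuid}")
            return reported
        return update_fn(provider_uuid, current)   # hypothetical placement update

    inventory = {
        "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                 "step_size": 1, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                      "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 45,
                    "step_size": 1, "allocation_ratio": 1.0},
    }
    sync_inventory("206a5498-2e79-46c1-a636-9488a05fb67d", inventory, inventory,
                   lambda *args: None)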
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.958238] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.102478] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.102746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.103991] env[69982]: DEBUG nova.compute.manager [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.104193] env[69982]: DEBUG nova.compute.manager [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing instance network info cache due to event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 927.104491] env[69982]: DEBUG oslo_concurrency.lockutils [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.104552] env[69982]: DEBUG oslo_concurrency.lockutils [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.104796] env[69982]: DEBUG nova.network.neutron [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 927.232177] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864828, 'name': CreateSnapshot_Task} progress is 0%. 
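The req-3e2be785 entries show what happens when Neutron reports network-changed-4e8435e5-...: the service takes the instance's refresh_cache lock and rebuilds the network info cache for that port, while the earlier network-vif-plugged event found no registered waiter and was logged as unexpected. A condensed sketch of that dispatch with hypothetical helpers.

    def handle_external_event(instance, event, waiters, refresh_cache):
        # waiters: {(event_name, tag): callback} registered by a spawning thread.
        # refresh_cache: hypothetical helper that refreshes the instance's
        # network info cache for one port.
        if event.name == "network-changed":
            refresh_cache(instance, port_id=event.tag)
            return
        callback = waiters.pop((event.name, event.tag), None)
        if callback is None:
            print(f"WARNING: Received unexpected event {event.name}-{event.tag} "
                  f"for instance with vm_state {instance['vm_state']}")
            return
        callback(event)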
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.250028] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d0114728-9d44-4700-86a9-175e5f840b1d] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 927.438436] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864831, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.440304] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864830, 'name': ReconfigVM_Task, 'duration_secs': 0.606056} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.441229] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.832s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.441798] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 927.444826] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 67613f71-a91e-4dae-8a6c-cd74c4821339/67613f71-a91e-4dae-8a6c-cd74c4821339.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 927.447042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.359s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.447042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.448647] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 
33.895s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.453538] env[69982]: INFO nova.compute.claims [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 927.453538] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-347c44fe-34aa-460c-8eb7-b503042794cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.473299] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 927.473299] env[69982]: value = "task-3864833" [ 927.473299] env[69982]: _type = "Task" [ 927.473299] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.478481] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083015} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.484395] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.485748] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762b88f1-d483-4538-b20c-0b1ca9b7ce32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.512417] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ff2c680a-211a-44ad-b00d-1037f1fcb856.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.513870] env[69982]: INFO nova.scheduler.client.report [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Deleted allocations for instance b3aeaa98-724b-4563-aeaf-a089906eb0eb [ 927.523063] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e12f01aa-376e-46f9-b48c-0c505d2699c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.547247] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] 
Task: {'id': task-3864833, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.557160] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 927.557160] env[69982]: value = "task-3864834" [ 927.557160] env[69982]: _type = "Task" [ 927.557160] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.567602] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864834, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.727574] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864828, 'name': CreateSnapshot_Task, 'duration_secs': 1.088063} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.727859] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 927.730065] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da03addf-6dbb-46d5-b45b-c5c0ed47e5b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.753998] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 3dcf60cd-c56d-4a91-9302-1b2e8d0ef6b2] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 927.804518] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Successfully updated port: c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.913528] env[69982]: DEBUG nova.network.neutron [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updated VIF entry in instance network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 927.913956] env[69982]: DEBUG nova.network.neutron [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.929499] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864831, 'name': CreateVM_Task, 'duration_secs': 0.626876} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.929781] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.930643] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.930878] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.931221] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.931813] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3222122-cfb9-4251-90f1-7d19740e84bb {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.938963] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 927.938963] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5207b21b-4a63-1215-e30d-a19342742f4a" [ 927.938963] env[69982]: _type = "Task" [ 927.938963] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.947924] env[69982]: DEBUG nova.compute.utils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 927.953936] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 927.954193] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 927.956430] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5207b21b-4a63-1215-e30d-a19342742f4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.989612] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864833, 'name': Rename_Task, 'duration_secs': 0.38924} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.989977] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 927.990290] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ab4edcc-c7b6-41ff-a3c7-b5178230738c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.998700] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 927.998700] env[69982]: value = "task-3864835" [ 927.998700] env[69982]: _type = "Task" [ 927.998700] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.011133] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.020110] env[69982]: DEBUG nova.policy [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2d1767ce6104441bdbe728b7a2e58a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fd9d77e845b44c2b8c488bac205c034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 928.055042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ead62554-725a-46a5-8dc6-ae731fcb0ec9 tempest-ServerAddressesTestJSON-1722420377 tempest-ServerAddressesTestJSON-1722420377-project-member] Lock "b3aeaa98-724b-4563-aeaf-a089906eb0eb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.564s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.070197] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864834, 'name': ReconfigVM_Task} progress is 14%. 
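The nova.policy entry above records network:attach_external_network failing for a token that carries only the member and reader roles. A minimal sketch of the same kind of check with oslo.policy, assuming the rule is registered with a role:admin default (the real default may differ); the credential fields mirror the ones printed in the log.

    from oslo_config import cfg
    from oslo_policy import policy

    cfg.CONF([], project="policy-example")   # no config files needed for this sketch
    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault("network:attach_external_network", "role:admin"))

    creds = {
        "roles": ["member", "reader"],
        "user_id": "e2d1767ce6104441bdbe728b7a2e58a3",
        "project_id": "0fd9d77e845b44c2b8c488bac205c034",
    }

    # False for a member/reader token, matching the "Policy check ... failed
    # with credentials ..." line above.
    print(enforcer.enforce("network:attach_external_network", {}, creds))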
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.257641] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 928.258664] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ac61e819-982f-4942-adbb-ef3e53065934 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.263373] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.263486] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances with incomplete migration {{(pid=69982) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 928.277652] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 928.277652] env[69982]: value = "task-3864836" [ 928.277652] env[69982]: _type = "Task" [ 928.277652] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.291282] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864836, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.307086] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.307255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.307413] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 928.416558] env[69982]: DEBUG oslo_concurrency.lockutils [req-3e2be785-6225-477b-acde-1d93536a8afc req-49b75dea-c1f0-4f5a-87bb-c4da290401d1 service nova] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.452824] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5207b21b-4a63-1215-e30d-a19342742f4a, 'name': SearchDatastore_Task, 'duration_secs': 0.029837} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.453239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.453501] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.453755] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.453952] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.454184] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.454552] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56e10ca2-33aa-4aa7-a1cf-efa7c0b44e7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.458392] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 928.465962] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.469831] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.469831] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d3366d7-b436-49b7-bd2f-bedf68f5281a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.474451] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 928.474451] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae475a-6fe6-5354-031b-d3d7c1545ca0" [ 928.474451] env[69982]: _type = "Task" [ 928.474451] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.485276] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae475a-6fe6-5354-031b-d3d7c1545ca0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.511325] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864835, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.577540] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864834, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.684909] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Successfully created port: 5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 928.768932] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.794454] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864836, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.878223] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 928.992500] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae475a-6fe6-5354-031b-d3d7c1545ca0, 'name': SearchDatastore_Task, 'duration_secs': 0.018576} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.999440] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-871d60ba-9495-4444-bacc-3e4397806f98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.022116] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 929.022116] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b2fcf7-937a-4cf4-b285-5be9cb42e426" [ 929.022116] env[69982]: _type = "Task" [ 929.022116] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.032102] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864835, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.048557] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b2fcf7-937a-4cf4-b285-5be9cb42e426, 'name': SearchDatastore_Task, 'duration_secs': 0.020831} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.051988] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.052343] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/331f218a-ad6b-4417-b56d-83113e0c92cb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.052948] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6419806e-172f-41ed-9f43-b6c7935f6531 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.062410] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 929.062410] env[69982]: value = "task-3864837" [ 929.062410] env[69982]: _type = "Task" [ 929.062410] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.085110] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864837, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.089926] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864834, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.128840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e6e9be-9c40-48d2-813a-c088c2b479ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.138661] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcbd1536-f193-49e5-b1d8-75e53a49c2a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.179259] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "a3e3106d-b7df-49c8-9341-a843977aefe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.180286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.180286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.180286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.180286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.183465] env[69982]: INFO nova.compute.manager [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Terminating instance [ 929.192036] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca890d3-629d-4c18-861a-40da3a0ee35e {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.210275] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26db9483-19b0-4ba9-a107-2ecde6428ebe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.227447] env[69982]: DEBUG nova.compute.provider_tree [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.232688] env[69982]: DEBUG nova.compute.manager [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Received event network-vif-plugged-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.232983] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Acquiring lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.233528] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.233738] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.233957] env[69982]: DEBUG nova.compute.manager [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] No waiting events found dispatching network-vif-plugged-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 929.234318] env[69982]: WARNING nova.compute.manager [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Received unexpected event network-vif-plugged-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 for instance with vm_state building and task_state spawning. 
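
The lock records above ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held 0.001s") come from oslo.concurrency's lockutils wrapper, which Nova uses for the per-instance event locks such as "96ad6ba4-...-events". A minimal sketch of how such a named in-process lock is typically taken, assuming a hypothetical decorated function (this is not Nova's code, only an illustration of the oslo_concurrency API that emits these messages):

    # Illustrative sketch only: a named oslo.concurrency lock like the
    # "<uuid>-events" locks seen in the records above. The function body
    # is a placeholder, not Nova's implementation.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = '96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa'  # uuid taken from the log

    @lockutils.synchronized(INSTANCE_UUID + '-events')
    def _pop_event():
        # Critical section; in Nova this would pop a pending instance event.
        return None

    # The decorator logs 'Acquiring lock "..." by "..."', then
    # 'acquired ... waited Ns' and 'released ... held Ns' around the call,
    # which is what the lockutils "inner" entries above record.
    _pop_event()
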
[ 929.234318] env[69982]: DEBUG nova.compute.manager [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Received event network-changed-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.234467] env[69982]: DEBUG nova.compute.manager [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Refreshing instance network info cache due to event network-changed-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 929.234646] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Acquiring lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.250226] env[69982]: DEBUG nova.network.neutron [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Updating instance_info_cache with network_info: [{"id": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "address": "fa:16:3e:1c:56:d1", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bd72f6-ab", "ovs_interfaceid": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.290818] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864836, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.478380] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 929.518618] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.519149] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.519221] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.519721] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.519721] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.519721] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 929.520038] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.520316] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 929.520400] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.521435] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.521435] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.521996] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7e9cd9-e04d-46ba-b364-bbf123452d11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.529835] env[69982]: DEBUG oslo_vmware.api [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864835, 'name': PowerOnVM_Task, 'duration_secs': 1.386813} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.535361] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.536419] env[69982]: INFO nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Took 11.41 seconds to spawn the instance on the hypervisor. 
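
The PowerOnVM_Task sequence above (task created, "wait_for_task", repeated "_poll_task ... progress is N%", then "completed successfully") follows the standard oslo.vmware invoke-and-wait pattern used throughout this log. A rough sketch of that pattern, with a hypothetical vCenter host, credentials and vm_ref; the constructor arguments follow oslo.vmware's VMwareAPISession but may differ between releases:

    # Illustrative sketch of the invoke-and-wait pattern behind the
    # wait_for_task / _poll_task records above. Host, credentials and
    # vm_ref are placeholders, not values from this environment.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',      # vCenter host (placeholder)
        'user', 'secret',       # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)

    vm_ref = ...                # VirtualMachine managed object ref, obtained elsewhere

    # PowerOnVM_Task returns a Task reference; wait_for_task() polls it
    # (producing the progress entries seen above) and returns the task
    # info on success, raising on error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
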
[ 929.536419] env[69982]: DEBUG nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.538530] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad9fb32-b6e7-46cf-899d-c872db510cba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.546807] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff8890e-6750-401a-9b95-f1ab69d9f4c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.593972] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864834, 'name': ReconfigVM_Task, 'duration_secs': 1.799923} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.597114] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Reconfigured VM instance instance-00000042 to attach disk [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ff2c680a-211a-44ad-b00d-1037f1fcb856.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.597895] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69982) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 929.599292] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864837, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.599292] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-d377cd80-96f2-48aa-8e29-1fbe2f720a16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.616032] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 929.616032] env[69982]: value = "task-3864838" [ 929.616032] env[69982]: _type = "Task" [ 929.616032] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.627733] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864838, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.700906] env[69982]: DEBUG nova.compute.manager [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 929.701333] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 929.702327] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77695fd-8cc4-4766-8172-b54ad709ce25 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.714348] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.714732] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f6f4315-3c9f-4433-8fe5-f8a0da090194 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.726085] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 929.726085] env[69982]: value = "task-3864839" [ 929.726085] env[69982]: _type = "Task" [ 929.726085] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.742189] env[69982]: DEBUG nova.scheduler.client.report [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.753347] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864839, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.754548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.755295] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Instance network_info: |[{"id": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "address": "fa:16:3e:1c:56:d1", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bd72f6-ab", "ovs_interfaceid": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 929.756686] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Acquired lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.756904] env[69982]: DEBUG nova.network.neutron 
[req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Refreshing network info cache for port c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 929.759675] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:56:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.769878] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.772016] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.774194] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95f34007-c042-470b-ad14-30cf517a5cdc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.804572] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864836, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.806489] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.806489] env[69982]: value = "task-3864840" [ 929.806489] env[69982]: _type = "Task" [ 929.806489] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.818776] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864840, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.069216] env[69982]: INFO nova.compute.manager [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Took 59.13 seconds to build instance. [ 930.083535] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644905} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.083936] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/331f218a-ad6b-4417-b56d-83113e0c92cb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.084257] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.084568] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01b10f82-d826-4ee8-be1b-712c35cca1d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.093474] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 930.093474] env[69982]: value = "task-3864841" [ 930.093474] env[69982]: _type = "Task" [ 930.093474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.104557] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864841, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.128672] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864838, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.127879} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.129154] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69982) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 930.130233] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b9058b-1ffb-4f5d-870b-199beb994aa8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.161756] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ephemeral_0.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.162212] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-80d71b83-63ff-4bee-9355-9cee1d282053 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.183699] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 930.183699] env[69982]: value = "task-3864842" [ 930.183699] env[69982]: _type = "Task" [ 930.183699] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.194874] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.241056] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864839, 'name': PowerOffVM_Task, 'duration_secs': 0.388485} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.241056] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.241056] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 930.241056] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0968e701-a198-4fe4-b5fe-d8416d1eb2ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.250353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.799s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.250353] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 930.251551] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 35.337s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.304818] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864836, 'name': CloneVM_Task, 'duration_secs': 1.725372} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.307021] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Created linked-clone VM from snapshot [ 930.307021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85b098b-cdbe-4ace-a0cf-8806b42a502b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.325482] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Uploading image b87792c6-b066-467e-ada6-8dd52594cca2 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 930.332830] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864840, 'name': CreateVM_Task, 'duration_secs': 0.521224} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.332830] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.333726] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.333954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.334398] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 930.334728] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecc933c0-7c16-4e63-bc4a-d3b208e341b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.341616] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 930.341616] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524585c7-78df-423b-f539-45e069bf808a" [ 930.341616] env[69982]: _type = "Task" [ 930.341616] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.353197] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524585c7-78df-423b-f539-45e069bf808a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.356633] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 930.356633] env[69982]: value = "vm-767993" [ 930.356633] env[69982]: _type = "VirtualMachine" [ 930.356633] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 930.356944] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-00075d22-f1fc-453f-a716-6ab6694da211 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.368287] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease: (returnval){ [ 930.368287] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52002bc1-0cb3-429b-a7db-010a4db2cdc8" [ 930.368287] env[69982]: _type = "HttpNfcLease" [ 930.368287] env[69982]: } obtained for exporting VM: (result){ [ 930.368287] env[69982]: value = "vm-767993" [ 930.368287] env[69982]: _type = "VirtualMachine" [ 930.368287] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 930.368587] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the lease: (returnval){ [ 930.368587] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52002bc1-0cb3-429b-a7db-010a4db2cdc8" [ 930.368587] env[69982]: _type = "HttpNfcLease" [ 930.368587] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 930.377339] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.377339] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52002bc1-0cb3-429b-a7db-010a4db2cdc8" [ 930.377339] env[69982]: _type = "HttpNfcLease" [ 930.377339] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 930.459362] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 930.459573] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 930.459769] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Deleting the datastore file [datastore2] a3e3106d-b7df-49c8-9341-a843977aefe4 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 930.460059] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8cbe886-b17d-4973-8f2f-49680f7289a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.471245] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for the task: (returnval){ [ 930.471245] env[69982]: value = "task-3864845" [ 930.471245] env[69982]: _type = "Task" [ 930.471245] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.481753] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.572161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d066f1d3-9af6-4cff-909a-c76b6d8a2543 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.915s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.606496] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097395} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.606957] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.607882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6071454c-fda9-4147-a8ce-7031218838fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.636202] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/331f218a-ad6b-4417-b56d-83113e0c92cb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.637085] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b00a9daa-700d-44c1-be8c-6a72fcfb0918 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.661702] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 930.661702] env[69982]: value = "task-3864846" [ 930.661702] env[69982]: _type = "Task" [ 930.661702] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.667048] env[69982]: DEBUG nova.network.neutron [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Updated VIF entry in instance network info cache for port c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 930.667782] env[69982]: DEBUG nova.network.neutron [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Updating instance_info_cache with network_info: [{"id": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "address": "fa:16:3e:1c:56:d1", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bd72f6-ab", "ovs_interfaceid": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.676757] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864846, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.697974] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864842, 'name': ReconfigVM_Task, 'duration_secs': 0.447095} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.698915] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Reconfigured VM instance instance-00000042 to attach disk [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856/ephemeral_0.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 930.700174] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9d81491-4a3e-457c-91b5-e8d197cc9556 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.711028] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 930.711028] env[69982]: value = "task-3864847" [ 930.711028] env[69982]: _type = "Task" [ 930.711028] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.721888] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864847, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.758009] env[69982]: DEBUG nova.compute.utils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 930.763135] env[69982]: INFO nova.compute.claims [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 930.767154] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 930.767718] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 930.854100] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524585c7-78df-423b-f539-45e069bf808a, 'name': SearchDatastore_Task, 'duration_secs': 0.017198} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.854260] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.854506] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.854776] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.854933] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.856450] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.856450] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2a68b59-c11b-4e7f-b775-965496ab2f34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.868734] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.869118] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.870946] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1b270b0-20d6-472d-8f44-6cdc8d19af59 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.879665] env[69982]: DEBUG nova.policy [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2d1767ce6104441bdbe728b7a2e58a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fd9d77e845b44c2b8c488bac205c034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 930.885432] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 930.885432] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e7d2d-819d-f690-9b12-6da744cfefab" [ 930.885432] env[69982]: _type = "Task" [ 930.885432] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.887396] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 930.887396] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52002bc1-0cb3-429b-a7db-010a4db2cdc8" [ 930.887396] env[69982]: _type = "HttpNfcLease" [ 930.887396] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 930.890703] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 930.890703] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52002bc1-0cb3-429b-a7db-010a4db2cdc8" [ 930.890703] env[69982]: _type = "HttpNfcLease" [ 930.890703] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 930.891629] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b681601f-9f42-4892-b93c-eecf602d70fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.904282] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e7d2d-819d-f690-9b12-6da744cfefab, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.913311] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 930.916853] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 930.916853] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc703044-feaf-4f5b-a956-70cc2acdd7c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.989620] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 930.989620] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cf3298-0df2-4a62-bdbd-fe020bf9e17c" [ 930.989620] env[69982]: _type = "Task" [ 930.989620] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.990998] env[69982]: DEBUG nova.compute.manager [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Received event network-vif-plugged-5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.991343] env[69982]: DEBUG oslo_concurrency.lockutils [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] Acquiring lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.991581] env[69982]: DEBUG oslo_concurrency.lockutils [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.991757] env[69982]: DEBUG oslo_concurrency.lockutils [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.991929] env[69982]: DEBUG nova.compute.manager [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] No waiting events found 
dispatching network-vif-plugged-5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 930.992621] env[69982]: WARNING nova.compute.manager [req-c54238cd-2bcb-4800-b15f-3311df8184dc req-69e98bdd-a682-46bb-a620-807d282f9086 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Received unexpected event network-vif-plugged-5c467e3e-6a47-4c32-b107-2c1b776e27ea for instance with vm_state building and task_state spawning. [ 930.994873] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Successfully updated port: 5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.007838] env[69982]: DEBUG oslo_vmware.api [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Task: {'id': task-3864845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378137} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.008009] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 931.008981] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 931.008981] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 931.009303] env[69982]: INFO nova.compute.manager [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Took 1.31 seconds to destroy the instance on the hypervisor. [ 931.009303] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.010158] env[69982]: DEBUG nova.compute.manager [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 931.010255] env[69982]: DEBUG nova.network.neutron [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 931.017500] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cf3298-0df2-4a62-bdbd-fe020bf9e17c, 'name': SearchDatastore_Task, 'duration_secs': 0.030586} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.018262] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.018525] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa/96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.018867] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28b0f47e-5db9-4f13-93a3-6813ddb602ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.028130] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 931.028130] env[69982]: value = "task-3864848" [ 931.028130] env[69982]: _type = "Task" [ 931.028130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.041334] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864848, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.071921] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-47d8f51f-3113-4dd2-bd72-6975a41b8b60 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.075199] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 931.170727] env[69982]: DEBUG oslo_concurrency.lockutils [req-454c0b84-795b-4a82-8152-751751d5c1cd req-0bbacf8a-db1e-4350-883b-437525b7cc41 service nova] Releasing lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.175817] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864846, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.208175] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 931.209243] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92df1def-8898-49de-8225-72e6e9a0eaa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.219285] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 931.219466] env[69982]: ERROR oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk due to incomplete transfer. [ 931.223303] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-dac3dd5d-3163-47f5-89cd-5a3ed2a2c277 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.225151] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864847, 'name': Rename_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.232833] env[69982]: DEBUG oslo_vmware.rw_handles [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52cf4bbe-58ba-7553-e5e0-37d75247d1f7/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 931.233053] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Uploaded image a60d2423-c997-4c2a-935c-cc8146777620 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 931.235332] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 931.235745] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b70413c6-c4c3-42de-992b-cb45e3ed47ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.246388] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 931.246388] env[69982]: value = "task-3864849" [ 931.246388] env[69982]: _type = "Task" [ 931.246388] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.255518] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864849, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.271546] env[69982]: INFO nova.compute.resource_tracker [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating resource usage from migration 18bb5912-7e73-47de-bffe-7728a8253c69 [ 931.274728] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 931.458151] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Successfully created port: 6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 931.498299] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.498458] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.498613] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 931.541420] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864848, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.541945] env[69982]: DEBUG oslo_concurrency.lockutils [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "interface-67613f71-a91e-4dae-8a6c-cd74c4821339-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.542228] env[69982]: DEBUG oslo_concurrency.lockutils [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "interface-67613f71-a91e-4dae-8a6c-cd74c4821339-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.542505] env[69982]: DEBUG nova.objects.instance [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lazy-loading 'flavor' on Instance uuid 67613f71-a91e-4dae-8a6c-cd74c4821339 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.607280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.677841] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864846, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.724680] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864847, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.761029] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864849, 'name': Destroy_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.762045] env[69982]: DEBUG nova.network.neutron [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.950648] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-083a5bc1-5cd7-4825-92d1-33cee64e9e51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.966218] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83edcfa3-d539-454f-af46-141da7035a9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.015627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838782a0-ba9b-48a2-8c3d-fab53bdd9d3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.025437] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c534699-b60b-4668-a4d9-cc39a608b021 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.043291] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864848, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.955019} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.052432] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa/96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.052826] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.054894] env[69982]: DEBUG nova.objects.instance [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lazy-loading 'pci_requests' on Instance uuid 67613f71-a91e-4dae-8a6c-cd74c4821339 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.057977] env[69982]: DEBUG nova.compute.provider_tree [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.059864] env[69982]: DEBUG nova.network.neutron [None 
req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 932.061802] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-153f96e0-c97b-4940-9c3a-2128ac8408df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.071473] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 932.071473] env[69982]: value = "task-3864850" [ 932.071473] env[69982]: _type = "Task" [ 932.071473] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.085453] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.180874] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864846, 'name': ReconfigVM_Task, 'duration_secs': 1.030776} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.180874] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/331f218a-ad6b-4417-b56d-83113e0c92cb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.181472] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c68cd95-b8aa-4e23-9df5-b71b6b919d1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.191173] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 932.191173] env[69982]: value = "task-3864851" [ 932.191173] env[69982]: _type = "Task" [ 932.191173] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.206032] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864851, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.227170] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864847, 'name': Rename_Task, 'duration_secs': 1.230773} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.227574] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.227871] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ceebbd77-3d6b-4e38-be62-0435dec971e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.236628] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 932.236628] env[69982]: value = "task-3864852" [ 932.236628] env[69982]: _type = "Task" [ 932.236628] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.246923] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864852, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.259808] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864849, 'name': Destroy_Task, 'duration_secs': 0.767917} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.260255] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Destroyed the VM [ 932.260600] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 932.261242] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-36e87bf7-9733-424d-9ccf-f6eddcd2c938 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.267441] env[69982]: INFO nova.compute.manager [-] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Took 1.26 seconds to deallocate network for instance. 
[ 932.271842] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 932.271842] env[69982]: value = "task-3864853" [ 932.271842] env[69982]: _type = "Task" [ 932.271842] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.288886] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864853, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.295856] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 932.301088] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Updating instance_info_cache with network_info: [{"id": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "address": "fa:16:3e:67:f6:7e", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c467e3e-6a", "ovs_interfaceid": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.336073] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 932.336655] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 932.336994] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 932.337335] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 932.337805] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 932.338085] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 932.338415] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 932.338658] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 932.338917] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 932.340563] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 932.340563] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 932.340733] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792a9c23-b7f7-4d31-be43-69f752f700b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.354542] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa896595-2ff7-4abf-8f13-9bbf2e0285f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.562961] env[69982]: DEBUG nova.objects.base [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Object Instance<67613f71-a91e-4dae-8a6c-cd74c4821339> lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 932.562961] env[69982]: DEBUG nova.network.neutron [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.569023] env[69982]: DEBUG nova.scheduler.client.report [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.585028] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077201} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.585028] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.585432] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f824e6b1-72af-40fa-9c01-4dda7f548503 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.617715] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa/96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.622276] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f064e818-745c-4f15-a1be-824a13a4c567 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.645598] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 932.645598] env[69982]: value = "task-3864854" [ 932.645598] env[69982]: _type = "Task" [ 932.645598] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.657917] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864854, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.683200] env[69982]: DEBUG oslo_concurrency.lockutils [None req-434e8398-a8d4-4dcd-9455-56146efea27e tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "interface-67613f71-a91e-4dae-8a6c-cd74c4821339-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.140s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.704759] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864851, 'name': Rename_Task, 'duration_secs': 0.236877} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.705134] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.705514] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf0619f7-4e92-4f7a-a015-3998af69787d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.715696] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 932.715696] env[69982]: value = "task-3864855" [ 932.715696] env[69982]: _type = "Task" [ 932.715696] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.727674] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.750694] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864852, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.782046] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.788379] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864853, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.804401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 932.805272] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Instance network_info: |[{"id": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "address": "fa:16:3e:67:f6:7e", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c467e3e-6a", "ovs_interfaceid": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 932.806170] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:f6:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c467e3e-6a47-4c32-b107-2c1b776e27ea', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 932.823882] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Creating folder: Project (0fd9d77e845b44c2b8c488bac205c034). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 932.824489] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d5e2b1c-3dd4-41b9-b42d-e775d0742954 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.838164] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Created folder: Project (0fd9d77e845b44c2b8c488bac205c034) in parent group-v767796. [ 932.838539] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Creating folder: Instances. Parent ref: group-v767995. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 932.838883] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-341b7b3b-5041-4cbb-9107-bccdc54a3036 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.851930] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Created folder: Instances in parent group-v767995. [ 932.852406] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 932.853030] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 932.853130] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbbb8d12-e88f-445a-ab66-7f721530caa5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.882440] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 932.882440] env[69982]: value = "task-3864858" [ 932.882440] env[69982]: _type = "Task" [ 932.882440] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.894106] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864858, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.072452] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.821s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.072725] env[69982]: INFO nova.compute.manager [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Migrating [ 933.080075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.173s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.081740] env[69982]: INFO nova.compute.claims [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 933.161414] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864854, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.234292] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864855, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.256837] env[69982]: DEBUG oslo_vmware.api [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864852, 'name': PowerOnVM_Task, 'duration_secs': 0.764515} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.257173] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.258743] env[69982]: INFO nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Took 11.81 seconds to spawn the instance on the hypervisor. 
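Annotation: the alternating "Waiting for the task" / "progress is N%" / "completed successfully" entries above come from oslo.vmware's task polling (wait_for_task at api.py:397 and _poll_task at api.py:434): every vCenter call such as ReconfigVM_Task, PowerOnVM_Task or CreateVM_Task returns a task reference that is polled until it reaches a terminal state. The following is only a minimal sketch of that loop, not oslo.vmware's implementation; get_task_info is a hypothetical accessor standing in for the PropertyCollector round-trips the real session performs, and the poll interval is illustrative (the real one is configurable).

    import logging
    import time

    LOG = logging.getLogger(__name__)

    POLL_INTERVAL = 0.5  # illustrative; the real poll interval is configurable


    class TaskError(Exception):
        """Raised when the vCenter task finishes in the 'error' state."""


    def wait_for_task(task_ref, get_task_info):
        """Poll a vCenter task reference until it completes.

        get_task_info is a hypothetical callable returning an object with
        'state' ('queued' | 'running' | 'success' | 'error'), 'progress' and
        'result' attributes.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state in ('queued', 'running'):
                # Matches the "progress is N%" debug lines in the log.
                LOG.debug("Task: %s progress is %s%%.", task_ref, info.progress or 0)
                time.sleep(POLL_INTERVAL)
                continue
            if info.state == 'success':
                LOG.debug("Task: %s completed successfully.", task_ref)
                return info.result
            raise TaskError("Task %s failed: %s" % (task_ref, getattr(info, 'error', None)))

In the log this pattern shows up as a "Waiting for the task: (returnval){ value = "task-38648xx" }" line followed by progress polls keyed by that task id.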
[ 933.258743] env[69982]: DEBUG nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.258743] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302f06cc-e014-46bd-a62c-8b00229f0d3f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.291444] env[69982]: DEBUG oslo_vmware.api [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864853, 'name': RemoveSnapshot_Task, 'duration_secs': 1.019286} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.291444] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 933.291444] env[69982]: INFO nova.compute.manager [None req-df40929e-0b22-45c5-a2f3-6a98239a3ba6 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 18.51 seconds to snapshot the instance on the hypervisor. [ 933.394349] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864858, 'name': CreateVM_Task, 'duration_secs': 0.399273} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.397197] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.397197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.397197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.397197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.397197] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3de081d-deba-465b-b6e4-3e8cb83d3166 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.403065] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 933.403065] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521cd720-100f-9d61-a787-0e040f5eeb20" [ 933.403065] env[69982]: _type = "Task" [ 933.403065] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.413047] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521cd720-100f-9d61-a787-0e040f5eeb20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.436595] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Successfully updated port: 6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 933.596827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.596827] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.596827] env[69982]: DEBUG nova.network.neutron [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 933.657584] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864854, 'name': ReconfigVM_Task, 'duration_secs': 0.521429} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.657871] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa/96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.658601] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab9a0c52-1d86-4bd7-95a0-484b2d228ad1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.666329] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 933.666329] env[69982]: value = "task-3864859" [ 933.666329] env[69982]: _type = "Task" [ 933.666329] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.676159] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864859, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.679315] env[69982]: DEBUG nova.compute.manager [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Received event network-changed-5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.679315] env[69982]: DEBUG nova.compute.manager [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Refreshing instance network info cache due to event network-changed-5c467e3e-6a47-4c32-b107-2c1b776e27ea. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 933.679393] env[69982]: DEBUG oslo_concurrency.lockutils [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] Acquiring lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.679563] env[69982]: DEBUG oslo_concurrency.lockutils [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] Acquired lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.679741] env[69982]: DEBUG nova.network.neutron [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Refreshing network info cache for port 5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 933.727688] env[69982]: DEBUG oslo_vmware.api [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3864855, 'name': PowerOnVM_Task, 'duration_secs': 0.780012} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.727908] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.728091] env[69982]: INFO nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Took 9.96 seconds to spawn the instance on the hypervisor. 
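Annotation: the "Acquiring lock ... / Lock ... acquired ... waited Xs / Lock ... released ... held Xs" entries are emitted by oslo.concurrency's lockutils, which serializes compute-manager code paths on named in-process locks such as "compute_resources", "refresh_cache-<instance uuid>" and "<instance uuid>-events". A minimal sketch of that pattern using the standard lockutils interface follows; it is not Nova's exact wrapper, and resource_tracker.record_usage and refresh_fn are illustrative stand-ins.

    from oslo_concurrency import lockutils


    def update_usage(resource_tracker, instance):
        # Mirrors the "compute_resources" lock in the log: only one thread at a
        # time may mutate the tracker's usage records.
        with lockutils.lock("compute_resources"):
            resource_tracker.record_usage(instance)  # illustrative body


    def refresh_network_cache(instance_uuid, refresh_fn):
        # Mirrors the "refresh_cache-<uuid>" locks: concurrent refreshes of the
        # same instance's network info cache are serialized; lockutils itself
        # emits the "acquired ... waited" / "released ... held" debug lines.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            return refresh_fn(instance_uuid)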
[ 933.728290] env[69982]: DEBUG nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.729171] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff4c21d-87ae-4ac9-b075-375419e47097 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.782633] env[69982]: INFO nova.compute.manager [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Took 58.15 seconds to build instance. [ 933.836071] env[69982]: DEBUG nova.compute.manager [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Received event network-vif-plugged-6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 933.836340] env[69982]: DEBUG oslo_concurrency.lockutils [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] Acquiring lock "8358b105-7276-4292-804d-534f9fb1535e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.836636] env[69982]: DEBUG oslo_concurrency.lockutils [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] Lock "8358b105-7276-4292-804d-534f9fb1535e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.836743] env[69982]: DEBUG oslo_concurrency.lockutils [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] Lock "8358b105-7276-4292-804d-534f9fb1535e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.836913] env[69982]: DEBUG nova.compute.manager [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] No waiting events found dispatching network-vif-plugged-6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 933.837100] env[69982]: WARNING nova.compute.manager [req-388bc007-c62b-4721-84b8-baa95b2771c9 req-92c9517b-d10a-4124-afdd-3e9b5546baba service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Received unexpected event network-vif-plugged-6e14b78a-b061-48f2-ad9b-6430822d4a48 for instance with vm_state building and task_state spawning. 
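Annotation: the WARNING about an "unexpected event network-vif-plugged-..." reflects Nova's external-event handshake with Neutron. Before plugging a VIF, the compute manager registers a waiter for the expected event; when Neutron reports the port as active, the event is popped and the waiter released. If no waiter is registered, as here while the instance is still building, the event is logged and dropped. Below is a deliberately simplified sketch of such a waiter registry built on threading primitives, not Nova's InstanceEvents class.

    import logging
    import threading

    LOG = logging.getLogger(__name__)


    class InstanceEventWaiters:
        """Toy registry mapping (instance_uuid, event_name) -> threading.Event."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}

        def prepare(self, instance_uuid, event_name):
            """Register interest in an event before triggering the action
            (e.g. before asking Neutron to bind/plug the port)."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def pop(self, instance_uuid, event_name):
            """Deliver an incoming event; warn if nobody was waiting for it."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                LOG.warning("Received unexpected event %s for instance %s",
                            event_name, instance_uuid)
                return False
            waiter.set()
            return True

A caller would prepare() first, trigger the port binding, then wait() on the returned Event with a timeout before proceeding with the spawn.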
[ 933.915420] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521cd720-100f-9d61-a787-0e040f5eeb20, 'name': SearchDatastore_Task, 'duration_secs': 0.012977} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.915750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.916018] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.916273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.916481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.916634] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.916993] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90b52141-4b9b-463c-a8ad-d9112ab48fca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.928673] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.928888] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.929839] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f30a8f1-3d36-4e5a-9d69-08d1955416be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.936509] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 933.936509] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d33065-0879-3ca5-2570-15c55df6d2e2" [ 933.936509] env[69982]: _type = "Task" [ 933.936509] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.940596] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.940738] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.940878] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 933.948141] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d33065-0879-3ca5-2570-15c55df6d2e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.177876] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864859, 'name': Rename_Task, 'duration_secs': 0.268822} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.178253] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.178547] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bcfcf13-aeb9-4338-b981-be31acbc39bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.186882] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 934.186882] env[69982]: value = "task-3864860" [ 934.186882] env[69982]: _type = "Task" [ 934.186882] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.205017] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864860, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.255682] env[69982]: INFO nova.compute.manager [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Took 51.35 seconds to build instance. [ 934.286692] env[69982]: DEBUG oslo_concurrency.lockutils [None req-afa79e65-f628-4986-ae17-d6cfbeb6c352 tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.606s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.462805] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d33065-0879-3ca5-2570-15c55df6d2e2, 'name': SearchDatastore_Task, 'duration_secs': 0.023361} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.466893] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5022d07-6c6d-46c6-96d5-23ea49e8eed9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.475068] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 934.475068] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52427135-63c1-a1fd-25fd-0caa6b454bd2" [ 934.475068] env[69982]: _type = "Task" [ 934.475068] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.488873] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52427135-63c1-a1fd-25fd-0caa6b454bd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.493899] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 934.606831] env[69982]: DEBUG nova.network.neutron [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.704117] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864860, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.733207] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd4d95a-a596-47c5-ad86-e78e83aaec52 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.741976] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b649b3-b38c-4c0a-a1e2-c103ac1e80bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.780462] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1f74b7a1-9275-48f1-97a3-fa93effceaaa tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.890s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.782427] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Updating instance_info_cache with network_info: [{"id": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "address": "fa:16:3e:cc:50:8a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e14b78a-b0", "ovs_interfaceid": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.785251] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df065b2-724b-4ddb-b969-fe0e797cb175 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.793986] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 934.804027] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6f4181-c2d5-4357-8530-7f953832ba99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.821932] env[69982]: DEBUG nova.compute.provider_tree [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.874784] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "67613f71-a91e-4dae-8a6c-cd74c4821339" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.875161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.875429] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.875692] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.875927] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.878580] env[69982]: INFO nova.compute.manager [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Terminating instance [ 934.987070] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 
tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52427135-63c1-a1fd-25fd-0caa6b454bd2, 'name': SearchDatastore_Task, 'duration_secs': 0.016894} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.987389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.987685] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 10a4294b-39ce-4643-98b5-71ac283f05f5/10a4294b-39ce-4643-98b5-71ac283f05f5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.987957] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32e07df2-ab15-49f0-ac3e-9c1c9bc02fd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.995872] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 934.995872] env[69982]: value = "task-3864861" [ 934.995872] env[69982]: _type = "Task" [ 934.995872] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.008047] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864861, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.110415] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.207704] env[69982]: DEBUG oslo_vmware.api [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864860, 'name': PowerOnVM_Task, 'duration_secs': 0.780688} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.208057] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.208302] env[69982]: INFO nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Took 8.57 seconds to spawn the instance on the hypervisor. [ 935.208500] env[69982]: DEBUG nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.209380] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f597c5-734a-4f7f-8888-9e90d11bf823 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.213569] env[69982]: DEBUG nova.network.neutron [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Updated VIF entry in instance network info cache for port 5c467e3e-6a47-4c32-b107-2c1b776e27ea. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 935.217485] env[69982]: DEBUG nova.network.neutron [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Updating instance_info_cache with network_info: [{"id": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "address": "fa:16:3e:67:f6:7e", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c467e3e-6a", "ovs_interfaceid": "5c467e3e-6a47-4c32-b107-2c1b776e27ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.295126] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 935.301384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.301384] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Instance network_info: |[{"id": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "address": "fa:16:3e:cc:50:8a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e14b78a-b0", "ovs_interfaceid": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 935.305171] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:50:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e14b78a-b061-48f2-ad9b-6430822d4a48', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 935.315189] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.317901] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 935.319277] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f2430f1-8576-4ebf-aa16-1802360d3e84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.344316] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.346133] env[69982]: DEBUG nova.scheduler.client.report [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.358215] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 935.358215] env[69982]: value = "task-3864862" [ 935.358215] env[69982]: _type = "Task" [ 935.358215] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.370169] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864862, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.383554] env[69982]: DEBUG nova.compute.manager [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.383806] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.385232] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e608b80-2dbd-41a0-ba85-b5fc11bb4257 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.396869] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.397278] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a389ff23-25f1-4fce-b268-0505ed475ac5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.404852] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 935.404852] env[69982]: value = "task-3864863" [ 935.404852] env[69982]: _type = "Task" [ 935.404852] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.417976] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864863, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.511894] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864861, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.720766] env[69982]: DEBUG oslo_concurrency.lockutils [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] Releasing lock "refresh_cache-10a4294b-39ce-4643-98b5-71ac283f05f5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.721286] env[69982]: DEBUG nova.compute.manager [req-9e1d2082-46ab-4b5f-8783-5cbff1d1bc92 req-34b7aeed-b75f-416d-94b0-0858a33dce2f service nova] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Received event network-vif-deleted-a4b3377a-104c-41a0-968e-96828e5b1313 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.742525] env[69982]: INFO nova.compute.manager [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Took 50.71 seconds to build instance. 
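Editor's note: the records above and below repeat one pattern — a vCenter method that returns a *_Task object is invoked (CreateVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task), and wait_for_task then polls it until the "progress is N%" / "completed successfully" lines appear. A minimal sketch of the underlying oslo.vmware call pattern follows; the host, credentials, and vm_ref are placeholders, not values from this log, and Nova itself goes through its own session wrapper rather than calling oslo.vmware this directly.

# Minimal sketch of the oslo.vmware task pattern seen in these records;
# connection details and the VM reference are placeholders.
from oslo_vmware import api

def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task ...")
    # and returns a task reference; wait_for_task() polls it (the "progress is N%"
    # records) and raises if the task finishes in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    return session.wait_for_task(task)

if __name__ == '__main__':
    # Placeholder host/credentials; a real run needs a reachable vCenter and a
    # VirtualMachine managed-object reference looked up via the PropertyCollector,
    # after which power_on(session, vm_ref) would be called.
    session = api.VMwareAPISession('vc.example.test', 'user', 'password', 10, 0.5)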
[ 935.835832] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.852141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.772s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.852935] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 935.855327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.086s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.855550] env[69982]: DEBUG nova.objects.instance [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lazy-loading 'resources' on Instance uuid 8b812422-4ca6-4d2b-b6af-873fdb21fab6 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 935.870458] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864862, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.903488] env[69982]: DEBUG nova.compute.manager [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Received event network-changed-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.903697] env[69982]: DEBUG nova.compute.manager [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Refreshing instance network info cache due to event network-changed-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 935.903919] env[69982]: DEBUG oslo_concurrency.lockutils [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] Acquiring lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.904076] env[69982]: DEBUG oslo_concurrency.lockutils [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] Acquired lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.904250] env[69982]: DEBUG nova.network.neutron [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Refreshing network info cache for port ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 935.919439] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864863, 'name': PowerOffVM_Task, 'duration_secs': 0.330636} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.920496] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.920682] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.920954] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c85527d-a9da-4902-8b16-3cc89428d695 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.927063] env[69982]: DEBUG nova.compute.manager [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Received event network-changed-6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.927063] env[69982]: DEBUG nova.compute.manager [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Refreshing instance network info cache due to event network-changed-6e14b78a-b061-48f2-ad9b-6430822d4a48. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 935.927063] env[69982]: DEBUG oslo_concurrency.lockutils [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] Acquiring lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.927063] env[69982]: DEBUG oslo_concurrency.lockutils [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] Acquired lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.927063] env[69982]: DEBUG nova.network.neutron [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Refreshing network info cache for port 6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 935.991954] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.992276] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.992467] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Deleting the datastore file [datastore2] 67613f71-a91e-4dae-8a6c-cd74c4821339 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.992750] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba0ea241-d583-4df1-8a65-94c537cc7a4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.003246] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for the task: (returnval){ [ 936.003246] env[69982]: value = "task-3864865" [ 936.003246] env[69982]: _type = "Task" [ 936.003246] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.012019] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864861, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570817} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.012176] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 10a4294b-39ce-4643-98b5-71ac283f05f5/10a4294b-39ce-4643-98b5-71ac283f05f5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.012391] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.012715] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-470bfc26-9b49-4675-8fbf-c98f36248acd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.018818] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864865, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.024646] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 936.024646] env[69982]: value = "task-3864866" [ 936.024646] env[69982]: _type = "Task" [ 936.024646] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.034452] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.208886] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Acquiring lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.209121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Acquired lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.209305] env[69982]: DEBUG nova.network.neutron [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 936.245470] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f0a66728-9b98-4b79-b664-ae9b13247de3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.223s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.360718] env[69982]: DEBUG nova.compute.utils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 936.363767] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 936.363987] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 936.386247] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864862, 'name': CreateVM_Task, 'duration_secs': 0.528704} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.387277] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 936.388163] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.388485] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.388888] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 936.389569] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d022b7-17de-4e73-9680-b88f3da75c0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.398018] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 936.398018] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be8b99-5130-246b-0c36-b87aa56e2a23" [ 936.398018] env[69982]: _type = "Task" [ 936.398018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.413696] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be8b99-5130-246b-0c36-b87aa56e2a23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.467408] env[69982]: DEBUG nova.policy [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e2d1767ce6104441bdbe728b7a2e58a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0fd9d77e845b44c2b8c488bac205c034', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 936.519895] env[69982]: DEBUG oslo_vmware.api [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Task: {'id': task-3864865, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304666} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.520380] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.520576] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.520756] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.520970] env[69982]: INFO nova.compute.manager [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Took 1.14 seconds to destroy the instance on the hypervisor. [ 936.521256] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.521909] env[69982]: DEBUG nova.compute.manager [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.521909] env[69982]: DEBUG nova.network.neutron [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 936.542560] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.114952} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.543339] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.544430] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba90ecc3-2e46-4c03-a887-31efbe6e99ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.572465] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 10a4294b-39ce-4643-98b5-71ac283f05f5/10a4294b-39ce-4643-98b5-71ac283f05f5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.575699] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c203f05-5820-4daf-826c-7a702ea2759e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.604119] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 936.604119] env[69982]: value = "task-3864867" [ 936.604119] env[69982]: _type = "Task" [ 936.604119] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.616246] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864867, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.638558] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd56113c-2e4b-4d85-b0f1-4565de9d2611 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.658450] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 936.748273] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 936.877032] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 936.913360] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be8b99-5130-246b-0c36-b87aa56e2a23, 'name': SearchDatastore_Task, 'duration_secs': 0.022224} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.917420] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.917420] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 936.917506] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.917596] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.918055] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 936.918683] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d276de6-1f68-4149-bdf5-59a48bdd4a83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.929701] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 936.930021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 936.930991] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5b74113-efb5-41b3-b04b-7faa292861ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.943267] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 936.943267] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526c907c-62d2-bb9f-f85b-e3f3bd5c408e" [ 936.943267] env[69982]: _type = "Task" [ 936.943267] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.961610] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526c907c-62d2-bb9f-f85b-e3f3bd5c408e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.061310] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Successfully created port: 2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.071154] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f06d3e-bfa5-4738-8fd9-48ee3ddbb0d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.080078] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1c6801-c4a7-4256-9ae2-1f6a6399d21d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.089190] env[69982]: DEBUG nova.network.neutron [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Updated VIF entry in instance network info cache for port 6e14b78a-b061-48f2-ad9b-6430822d4a48. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 937.089190] env[69982]: DEBUG nova.network.neutron [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Updating instance_info_cache with network_info: [{"id": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "address": "fa:16:3e:cc:50:8a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e14b78a-b0", "ovs_interfaceid": "6e14b78a-b061-48f2-ad9b-6430822d4a48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.124752] env[69982]: DEBUG oslo_concurrency.lockutils [req-1e973173-369e-4b44-9a2a-d85aebd48cd5 req-5fdebe54-df81-4b27-b790-62e5921577f1 service nova] Releasing lock "refresh_cache-8358b105-7276-4292-804d-534f9fb1535e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.130015] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-749fe53d-e70b-4090-acb3-3e4ec869f610 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.135583] env[69982]: DEBUG nova.network.neutron [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updated VIF entry in instance network info cache for port ff1c200e-1846-4f71-8cf1-a64fa7ea9a92. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 937.135583] env[69982]: DEBUG nova.network.neutron [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updating instance_info_cache with network_info: [{"id": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "address": "fa:16:3e:45:c0:5c", "network": {"id": "353a0b4f-654e-4e88-bb23-d393e9e3c76b", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2006832196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fe14238478147f8bab643b39a1bcb34", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad72c645-a67d-4efd-b563-28e44077e68d", "external-id": "nsx-vlan-transportzone-201", "segmentation_id": 201, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff1c200e-18", "ovs_interfaceid": "ff1c200e-1846-4f71-8cf1-a64fa7ea9a92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.147712] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2da112f-8724-470b-9804-14e20f3cdb5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.152185] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864867, 'name': ReconfigVM_Task, 'duration_secs': 0.312152} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.152612] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 10a4294b-39ce-4643-98b5-71ac283f05f5/10a4294b-39ce-4643-98b5-71ac283f05f5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.154451] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-597a8805-067b-4883-9209-3be1f8306aeb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.168302] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.168518] env[69982]: DEBUG nova.compute.provider_tree [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.171266] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8d660b3-040d-4869-89e0-75f3d845129c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.173423] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 937.173423] env[69982]: value = "task-3864868" [ 937.173423] env[69982]: _type = "Task" [ 937.173423] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.183168] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 937.183168] env[69982]: value = "task-3864869" [ 937.183168] env[69982]: _type = "Task" [ 937.183168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.191249] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864868, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.197948] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864869, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.281581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.322854] env[69982]: DEBUG nova.network.neutron [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Updating instance_info_cache with network_info: [{"id": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "address": "fa:16:3e:1c:56:d1", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", "external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bd72f6-ab", "ovs_interfaceid": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.457643] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526c907c-62d2-bb9f-f85b-e3f3bd5c408e, 'name': SearchDatastore_Task, 'duration_secs': 0.027474} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.459114] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0af8ab0-36b4-49bd-bd0a-84d18a55d811 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.465973] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 937.465973] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e0f2e7-7a97-d588-344f-169ce30142c1" [ 937.465973] env[69982]: _type = "Task" [ 937.465973] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.482314] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e0f2e7-7a97-d588-344f-169ce30142c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.539771] env[69982]: DEBUG nova.network.neutron [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.638696] env[69982]: DEBUG oslo_concurrency.lockutils [req-93d2db90-e569-4a70-bf24-90eff03eead6 req-34533564-397d-4d96-930b-014add9f935b service nova] Releasing lock "refresh_cache-ff2c680a-211a-44ad-b00d-1037f1fcb856" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.677035] env[69982]: DEBUG nova.scheduler.client.report [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.691880] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864868, 'name': Rename_Task, 'duration_secs': 0.178673} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.692921] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 937.693366] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a1217e2-fc54-4145-b961-449f88730814 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.702576] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864869, 'name': PowerOffVM_Task, 'duration_secs': 0.308462} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.704310] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.704767] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 937.715150] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 937.715150] env[69982]: value = "task-3864870" [ 937.715150] env[69982]: _type = "Task" [ 937.715150] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.725348] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864870, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.828021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Releasing lock "refresh_cache-96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.828021] env[69982]: DEBUG nova.compute.manager [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Inject network info {{(pid=69982) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 937.828500] env[69982]: DEBUG nova.compute.manager [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] network_info to inject: |[{"id": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "address": "fa:16:3e:1c:56:d1", "network": {"id": "db0597ba-a4eb-4e94-b97e-05efc701209c", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1483061469-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5ba58648e534c55953c89a4eae7caf4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "51876cd6-d373-4edc-8595-254e5d631378", 
"external-id": "nsx-vlan-transportzone-916", "segmentation_id": 916, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2bd72f6-ab", "ovs_interfaceid": "c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 937.832362] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Reconfiguring VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 937.832895] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b7f0524-45f7-407c-8a67-12219364fe6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.858130] env[69982]: DEBUG oslo_vmware.api [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Waiting for the task: (returnval){ [ 937.858130] env[69982]: value = "task-3864871" [ 937.858130] env[69982]: _type = "Task" [ 937.858130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.867466] env[69982]: DEBUG oslo_vmware.api [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Task: {'id': task-3864871, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.889621] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 937.979573] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e0f2e7-7a97-d588-344f-169ce30142c1, 'name': SearchDatastore_Task, 'duration_secs': 0.034784} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.980319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.980556] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8358b105-7276-4292-804d-534f9fb1535e/8358b105-7276-4292-804d-534f9fb1535e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 937.980914] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e3ed89a-e734-4bad-808a-67efe254a6fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.989886] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 937.989886] env[69982]: value = "task-3864872" [ 937.989886] env[69982]: _type = "Task" [ 937.989886] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.005178] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864872, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.017089] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.017089] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.017304] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.017415] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.017579] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.017864] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.017992] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.018219] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 938.018461] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.018772] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.019288] env[69982]: DEBUG nova.virt.hardware [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.021734] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c42417e-0b24-4197-bb73-65cbbfd6930a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.030486] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e992ae-ddfd-427a-9839-e49db32c708f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.048501] env[69982]: INFO nova.compute.manager [-] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Took 1.53 seconds to deallocate network for instance. [ 938.082501] env[69982]: DEBUG nova.compute.manager [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.082890] env[69982]: DEBUG nova.compute.manager [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing instance network info cache due to event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 938.083495] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.083631] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.083847] env[69982]: DEBUG nova.network.neutron [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 938.186330] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.328s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.186899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.336s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.190670] env[69982]: INFO nova.compute.claims [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 938.217546] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 938.218271] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.218271] env[69982]: DEBUG 
nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 938.218610] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.218781] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 938.218943] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 938.219353] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 938.219605] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 938.219934] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 938.220097] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 938.220381] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 938.230421] env[69982]: INFO nova.scheduler.client.report [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleted allocations for instance 8b812422-4ca6-4d2b-b6af-873fdb21fab6 [ 938.232467] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbb8ecd8-3db1-4015-922b-adc4801aebb3 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.269950] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864870, 'name': PowerOnVM_Task, 'duration_secs': 0.513238} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.272527] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.272527] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Took 8.79 seconds to spawn the instance on the hypervisor. [ 938.272527] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.272889] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 938.272889] env[69982]: value = "task-3864873" [ 938.272889] env[69982]: _type = "Task" [ 938.272889] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.274382] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5a7eb9-af1e-48ab-a200-f1cc0bdf5964 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.295913] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864873, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.371966] env[69982]: DEBUG oslo_vmware.api [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] Task: {'id': task-3864871, 'name': ReconfigVM_Task, 'duration_secs': 0.220904} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.372357] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3363073-621e-4281-98c6-d2455930163d tempest-ServersAdminTestJSON-781408600 tempest-ServersAdminTestJSON-781408600-project-admin] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Reconfigured VM instance to set the machine id {{(pid=69982) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 938.503998] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864872, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.561286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.766378] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bb1e0ee6-7f05-4d05-a423-422e63e6fd9f tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "8b812422-4ca6-4d2b-b6af-873fdb21fab6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.676s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.798910] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864873, 'name': ReconfigVM_Task, 'duration_secs': 0.51406} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.802736] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 938.807121] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Took 50.30 seconds to build instance. 
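Annotation: the task-38648xx entries above (PowerOnVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task) all follow oslo.vmware's invoke-then-poll pattern: a vSphere *_Task method is invoked through the API session, and wait_for_task() polls it until completion, which is what produces the "Waiting for the task ... to complete" and "progress is N%" lines. A minimal sketch of that pattern, assuming a caller-supplied session and vm_ref (the commented-out connection values are placeholders, not taken from this log):

    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        """Invoke PowerOnVM_Task and block until vSphere reports success."""
        # invoke_api() sends the SOAP request through the session's Vim client;
        # wait_for_task() then polls the TaskInfo and returns it on 'success'
        # (raising on 'error'), matching the poll/progress lines in the log.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

    # A session like the one nova-compute holds (placeholder endpoint/credentials):
    # session = vmware_api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                       api_retry_count=10,
    #                                       task_poll_interval=0.5)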
[ 938.836466] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Successfully updated port: 2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 938.852027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.852354] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.852583] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.852766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.852977] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.855957] env[69982]: INFO nova.compute.manager [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Terminating instance [ 938.952813] env[69982]: DEBUG nova.network.neutron [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updated VIF entry in instance network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 938.953326] env[69982]: DEBUG nova.network.neutron [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.001280] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864872, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.730734} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.001927] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8358b105-7276-4292-804d-534f9fb1535e/8358b105-7276-4292-804d-534f9fb1535e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 939.002070] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 939.002610] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a6e4800-cdd3-4ee2-8ffe-26a621c61256 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.010563] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 939.010563] env[69982]: value = "task-3864874" [ 939.010563] env[69982]: _type = "Task" [ 939.010563] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.020564] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864874, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.309962] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.310250] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.310597] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.310597] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.310752] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.310897] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.311151] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.311331] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.311496] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 
tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.311852] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.311852] env[69982]: DEBUG nova.virt.hardware [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.317880] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfiguring VM instance instance-0000003c to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 939.318934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.309s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.318934] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1106577-66cc-41a9-9e46-8e56f50af327 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.337516] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.337677] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.337830] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 939.342090] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 939.342090] env[69982]: value = "task-3864875" [ 939.342090] env[69982]: _type = "Task" [ 
939.342090] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.354881] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864875, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.360252] env[69982]: DEBUG nova.compute.manager [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.360545] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.361520] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16927bc-652b-46eb-a0bd-9ac4c40160af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.373316] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.373396] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38733655-577c-4caf-b83f-5597873f97aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.382597] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 939.382597] env[69982]: value = "task-3864876" [ 939.382597] env[69982]: _type = "Task" [ 939.382597] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.402451] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.456754] env[69982]: DEBUG oslo_concurrency.lockutils [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.457486] env[69982]: DEBUG nova.compute.manager [req-6cf5e454-98ba-45a2-91af-a10195397d04 req-b64da370-f4cd-4450-843b-b1103d30935d service nova] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Received event network-vif-deleted-3e3f4839-831a-4b7d-8026-14ba4c18d895 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.527184] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864874, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07167} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.530574] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 939.532154] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc1d6cb-610f-4d6c-989d-ca24f1acf4d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.564025] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] 8358b105-7276-4292-804d-534f9fb1535e/8358b105-7276-4292-804d-534f9fb1535e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.564720] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccb8fb37-97c6-4a83-a98c-3a63800ec9d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.594898] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 939.594898] env[69982]: value = "task-3864877" [ 939.594898] env[69982]: _type = "Task" [ 939.594898] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.608678] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.808341] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22520b44-fdaa-4616-9d81-410dbd64e311 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.816917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b3d9b68-8fa3-44e7-be79-9b4d8001bf31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.857546] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e00096e-d5e7-487e-9476-f55e9abba53f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.863140] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "8518f3c8-738d-468a-9f57-de50e4e67108" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.863386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.875657] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64209440-b048-4a55-a2ef-d108445c9f22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.880612] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864875, 'name': ReconfigVM_Task, 'duration_secs': 0.375229} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.881215] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfigured VM instance instance-0000003c to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 939.882615] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378257de-dbcf-457a-bd24-d25d9216de0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.894924] env[69982]: DEBUG nova.compute.provider_tree [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 939.905947] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864876, 'name': PowerOffVM_Task, 'duration_secs': 0.394451} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.917505] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.917706] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.926825] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 939.927153] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-67a69752-9c8f-46a8-bf6a-fbd7eb3a06fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.929613] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78976890-4164-4032-bc96-89310acbce67 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.953329] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 
tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 939.953329] env[69982]: value = "task-3864879" [ 939.953329] env[69982]: _type = "Task" [ 939.953329] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.964604] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.018221] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 940.018536] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 940.018721] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleting the datastore file [datastore2] 570675a8-3ec0-4fe6-b123-d3901d56b8cf {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 940.018981] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c7522b-c957-4fe4-85e5-122b6c706204 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.022733] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 940.026368] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 940.026368] env[69982]: value = "task-3864880" [ 940.026368] env[69982]: _type = "Task" [ 940.026368] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.036954] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864880, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.091185] env[69982]: INFO nova.compute.manager [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Rebuilding instance [ 940.106931] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.113297] env[69982]: DEBUG nova.compute.manager [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Received event network-vif-plugged-2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.113558] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Acquiring lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.113789] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.113949] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.114141] env[69982]: DEBUG nova.compute.manager [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] No waiting events found dispatching network-vif-plugged-2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 940.114319] env[69982]: WARNING nova.compute.manager [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Received unexpected event network-vif-plugged-2b95faf4-6ad3-4170-a93a-15d892a8ca46 for instance with vm_state building and task_state spawning. 
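Annotation: the 'Acquiring lock ... by ...', 'acquired ... :: waited Ns' and '"released" ... :: held Ns' lines around the event handling and resource-tracker calls come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms that emit this kind of DEBUG output; the lock names and the function below are illustrative, not copied from Nova:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapper logs acquire/release (with waited/held
    # timings) around the wrapped callable, as for "compute_resources" above.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section guarded by the named lock

    # Context-manager form, as used for the per-instance
    # "refresh_cache-<uuid>" and "<uuid>-events" locks
    # (the UUID here is a placeholder):
    with lockutils.lock('refresh_cache-00000000-0000-0000-0000-000000000000'):
        pass  # refresh the instance network info cache while holding the lock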
[ 940.114474] env[69982]: DEBUG nova.compute.manager [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Received event network-changed-2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.114630] env[69982]: DEBUG nova.compute.manager [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Refreshing instance network info cache due to event network-changed-2b95faf4-6ad3-4170-a93a-15d892a8ca46. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.114799] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Acquiring lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.147811] env[69982]: DEBUG nova.compute.manager [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.148797] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e93a988-b376-4499-833f-9cd2f5f8b83f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.192782] env[69982]: DEBUG nova.network.neutron [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Updating instance_info_cache with network_info: [{"id": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "address": "fa:16:3e:1d:9f:1a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b95faf4-6a", "ovs_interfaceid": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.367487] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 940.401684] env[69982]: DEBUG nova.scheduler.client.report [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.464411] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864879, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.536822] env[69982]: DEBUG oslo_vmware.api [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340629} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.537140] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.537336] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.537512] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.537685] env[69982]: INFO nova.compute.manager [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Took 1.18 seconds to destroy the instance on the hypervisor. [ 940.537928] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.538151] env[69982]: DEBUG nova.compute.manager [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.538258] env[69982]: DEBUG nova.network.neutron [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 940.612519] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864877, 'name': ReconfigVM_Task, 'duration_secs': 0.586847} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.612779] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Reconfigured VM instance instance-00000046 to attach disk [datastore2] 8358b105-7276-4292-804d-534f9fb1535e/8358b105-7276-4292-804d-534f9fb1535e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.613387] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e098066-f286-467c-9419-5ae4058af3d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.624560] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 940.624560] env[69982]: value = "task-3864881" [ 940.624560] env[69982]: _type = "Task" [ 940.624560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.641332] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864881, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.646014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.646318] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.646534] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.646717] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.646931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.649266] env[69982]: INFO nova.compute.manager [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Terminating instance [ 940.695696] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 940.696053] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Instance network_info: |[{"id": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "address": "fa:16:3e:1d:9f:1a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b95faf4-6a", "ovs_interfaceid": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 940.696424] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Acquired lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.696638] env[69982]: DEBUG nova.network.neutron [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Refreshing network info cache for port 2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 940.698381] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:9f:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e41c97-4d75-4041-ae71-321e7e9d480b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b95faf4-6ad3-4170-a93a-15d892a8ca46', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.714017] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.721776] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.722656] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e4ac4c2-9bb4-4145-8ada-31d0bceaf4f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.757185] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 940.757185] env[69982]: value = "task-3864882" [ 940.757185] env[69982]: _type = "Task" [ 940.757185] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.768588] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864882, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.858834] env[69982]: DEBUG nova.compute.manager [req-1c4016c6-816f-4b51-80fb-7869f6719a3f req-58426c7d-4eb4-4c04-b2e8-25e6791bf495 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Received event network-vif-deleted-f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.859222] env[69982]: INFO nova.compute.manager [req-1c4016c6-816f-4b51-80fb-7869f6719a3f req-58426c7d-4eb4-4c04-b2e8-25e6791bf495 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Neutron deleted interface f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9; detaching it from the instance and deleting it from the info cache [ 940.859462] env[69982]: DEBUG nova.network.neutron [req-1c4016c6-816f-4b51-80fb-7869f6719a3f req-58426c7d-4eb4-4c04-b2e8-25e6791bf495 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.893379] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.908038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.720s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.908038] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 940.913387] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.731s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.913708] env[69982]: DEBUG nova.objects.instance [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lazy-loading 'resources' on Instance uuid a61e3d25-9064-4f18-b7f1-0045b705571a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.965376] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864879, 'name': ReconfigVM_Task, 'duration_secs': 0.710202} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.965666] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Reconfigured VM instance instance-0000003c to attach disk [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388/9b733e1e-0532-4d91-a460-6b1f1971f388.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 940.965956] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 941.040290] env[69982]: DEBUG nova.network.neutron [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Updated VIF entry in instance network info cache for port 2b95faf4-6ad3-4170-a93a-15d892a8ca46. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 941.040733] env[69982]: DEBUG nova.network.neutron [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Updating instance_info_cache with network_info: [{"id": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "address": "fa:16:3e:1d:9f:1a", "network": {"id": "0ad99a8e-5c66-4fcd-9f54-9df6ac1f1c35", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-439954045-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0fd9d77e845b44c2b8c488bac205c034", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e41c97-4d75-4041-ae71-321e7e9d480b", "external-id": "nsx-vlan-transportzone-483", "segmentation_id": 483, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b95faf4-6a", "ovs_interfaceid": "2b95faf4-6ad3-4170-a93a-15d892a8ca46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.136184] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864881, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.154209] env[69982]: DEBUG nova.compute.manager [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.154701] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.156110] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bbb610-18cd-41e2-a83b-fcea80da2c31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.165695] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.166126] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-711d5086-1cd5-46ab-a170-35e13be8f96c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.168540] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.168831] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0392c3f9-e361-4930-847b-d57a8657322e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.177416] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 941.177416] env[69982]: value = "task-3864883" [ 941.177416] env[69982]: _type = "Task" [ 941.177416] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.183168] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 941.183168] env[69982]: value = "task-3864884" [ 941.183168] env[69982]: _type = "Task" [ 941.183168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.192029] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.198690] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864884, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.268793] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864882, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.342351] env[69982]: DEBUG nova.network.neutron [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.362854] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-127e87fd-1857-4ce7-b1e1-027d649137a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.374927] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ad4984-746c-4eb1-b945-5c8de728b293 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.422096] env[69982]: DEBUG nova.compute.utils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 941.427220] env[69982]: DEBUG nova.compute.manager [req-1c4016c6-816f-4b51-80fb-7869f6719a3f req-58426c7d-4eb4-4c04-b2e8-25e6791bf495 service nova] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Detach interface failed, port_id=f05f6887-5e47-4f58-9c47-3fb7e4e0a7c9, reason: Instance 570675a8-3ec0-4fe6-b123-d3901d56b8cf could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 941.428158] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 941.428334] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 941.475614] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-371a1ef4-1faa-4034-966c-327238941149 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.506888] env[69982]: DEBUG nova.policy [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '847765544bc249f7b2f5a61020cddd46', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '38945e679e1c4550b82ada82b9b1b7ec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 941.508826] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33abb7d0-6f12-4aa9-8de3-db8074b45709 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.530590] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 941.543924] env[69982]: DEBUG oslo_concurrency.lockutils [req-572dbc26-fb50-4638-a8cd-ab0abd80bee0 req-f682efab-3ad1-4707-af22-f95bf9e3ba8f service nova] Releasing lock "refresh_cache-bba6f430-5af5-4d8a-9cf4-082207c170a5" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.637675] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864881, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.691643] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.697806] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864884, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.772730] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864882, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.784078] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 941.785151] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b893fa-f757-4201-98a1-e391e7b89159 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.795334] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 941.795556] env[69982]: ERROR oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk due to incomplete transfer. [ 941.795857] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-97777ba7-12d6-45c9-a24f-8fccea10303f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.815969] env[69982]: DEBUG oslo_vmware.rw_handles [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522b30cd-a174-6de6-9169-791d85026a69/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 941.815969] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Uploaded image b87792c6-b066-467e-ada6-8dd52594cca2 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 941.818952] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 941.818952] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5d2da34f-63d4-4772-9601-052d35b4978e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.830293] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 941.830293] env[69982]: value = "task-3864885" [ 941.830293] env[69982]: _type = "Task" [ 941.830293] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.840474] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864885, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.846183] env[69982]: INFO nova.compute.manager [-] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Took 1.31 seconds to deallocate network for instance. [ 941.866168] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Successfully created port: ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 941.929348] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 942.063158] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1e5654-f4ec-4534-9916-f6ba8263698c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.073471] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fa0686-05e4-4a69-b430-e90611314286 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.109153] env[69982]: DEBUG nova.network.neutron [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Port f9356a59-fa6f-4664-b5ff-4a2609f506c3 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 942.111191] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda82c8d-5675-4067-b212-95b1a890cecd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.120547] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3dc71a-9344-4f57-a7d0-109728439ab6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.140052] env[69982]: DEBUG nova.compute.provider_tree [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.147685] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864881, 'name': Rename_Task, 'duration_secs': 1.222996} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.147883] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.148076] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-162cb436-eea6-41a2-8ee8-d43ad2525f75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.155954] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 942.155954] env[69982]: value = "task-3864886" [ 942.155954] env[69982]: _type = "Task" [ 942.155954] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.165918] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.193385] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864883, 'name': PowerOffVM_Task, 'duration_secs': 0.856542} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.194531] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.194531] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.194742] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87b5b8e7-209c-419e-8f22-dd5bd6bfd8ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.200239] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864884, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.278216] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864882, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.289553] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.289816] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.290019] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleting the datastore file [datastore1] 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.290697] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b8b1b96-e7ac-49fa-b21b-65bda0280b32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.299573] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for the task: (returnval){ [ 942.299573] env[69982]: value = "task-3864888" [ 942.299573] env[69982]: _type = "Task" [ 942.299573] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.338156] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864885, 'name': Destroy_Task, 'duration_secs': 0.499747} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.338156] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Destroyed the VM [ 942.338521] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 942.338808] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9b2f8d9b-ab51-4595-8918-0708b86dba16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.346551] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 942.346551] env[69982]: value = "task-3864889" [ 942.346551] env[69982]: _type = "Task" [ 942.346551] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.356332] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864889, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.357327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.644048] env[69982]: DEBUG nova.scheduler.client.report [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 942.671223] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864886, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.700180] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864884, 'name': PowerOffVM_Task, 'duration_secs': 1.255811} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.701475] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.701790] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 942.702736] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e234813-ed02-426f-95d0-321577e2b670 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.714107] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.714489] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b85eadab-07ff-4986-bf22-c062071ba974 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.771055] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864882, 'name': CreateVM_Task, 'duration_secs': 1.542823} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.771055] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.772718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.772718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.772718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.772968] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c23747cc-0774-46bd-b304-c5240b77a276 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.778383] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 942.778383] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b7be9-d5e7-b017-d0ab-fe6c204bd61d" [ 942.778383] env[69982]: _type = "Task" [ 942.778383] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.788133] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b7be9-d5e7-b017-d0ab-fe6c204bd61d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.809944] env[69982]: DEBUG oslo_vmware.api [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Task: {'id': task-3864888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232566} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.810339] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.810964] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.810964] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.811172] env[69982]: INFO nova.compute.manager [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Took 1.66 seconds to destroy the instance on the hypervisor. [ 942.811303] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.811540] env[69982]: DEBUG nova.compute.manager [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.811676] env[69982]: DEBUG nova.network.neutron [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 942.860177] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864889, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.939680] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 942.972391] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 942.972683] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 942.972879] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 942.973195] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 942.973338] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 942.973598] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 942.973841] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 942.974038] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 942.974259] 
env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 942.974475] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 942.974693] env[69982]: DEBUG nova.virt.hardware [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 942.975662] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8b53d3-20d3-411f-9644-96f6d4a9b96f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.986821] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a02412-7d0b-4834-86ed-0ae2a00386f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.090666] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "ab14774e-c834-41e9-bb3f-87722b51070e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.091043] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.146030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.146030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.146030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b 
tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.151957] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.155373] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.557s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.157628] env[69982]: INFO nova.compute.claims [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.176532] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864886, 'name': PowerOnVM_Task, 'duration_secs': 0.562584} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.176895] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.177254] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Took 10.88 seconds to spawn the instance on the hypervisor. 
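The PowerOnVM_Task and SearchDatastore_Task records above follow the usual oslo.vmware pattern: the driver invokes a vSphere method that returns a Task managed-object reference, then blocks in wait_for_task() while _poll_task emits the "progress is N%" and "completed successfully" lines seen throughout this log. A minimal, hedged sketch of that flow in Python (the host, credentials and VM moref value are illustrative placeholders, not values taken from this log):

# Sketch of the invoke-then-poll pattern behind the Task records above.
# Connection details and the moref value are assumed/illustrative.
from oslo_vmware import api, vim_util

def power_on_vm(host, user, password, vm_moref_value):
    # The session object handles login, re-login and retries against the
    # vCenter SDK endpoint.
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    # Build a VirtualMachine managed-object reference (value assumed known).
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    # PowerOnVM_Task returns a Task moref; wait_for_task() polls it until it
    # reaches 100% and reports success, or raises if the task fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)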
[ 943.177427] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.178670] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7470ef6-d231-444e-af4f-4db18d6f82bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.195201] env[69982]: INFO nova.scheduler.client.report [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleted allocations for instance a61e3d25-9064-4f18-b7f1-0045b705571a [ 943.231987] env[69982]: DEBUG nova.compute.manager [req-a4d08b8e-5893-471e-b592-21f272734834 req-8264ee7a-d473-4627-8654-75047ea2da8d service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Received event network-vif-deleted-5d41e03b-3fd8-4389-a457-2000cf628f86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.232214] env[69982]: INFO nova.compute.manager [req-a4d08b8e-5893-471e-b592-21f272734834 req-8264ee7a-d473-4627-8654-75047ea2da8d service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Neutron deleted interface 5d41e03b-3fd8-4389-a457-2000cf628f86; detaching it from the instance and deleting it from the info cache [ 943.232389] env[69982]: DEBUG nova.network.neutron [req-a4d08b8e-5893-471e-b592-21f272734834 req-8264ee7a-d473-4627-8654-75047ea2da8d service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.290484] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b7be9-d5e7-b017-d0ab-fe6c204bd61d, 'name': SearchDatastore_Task, 'duration_secs': 0.012327} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.291363] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.291466] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.291762] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.291996] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.292272] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.292611] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3acc2efe-e72c-4a7c-acde-0e168ca7a5b8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.304885] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.305083] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.305847] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c60a6db5-be95-461f-b409-1877cb181caf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.312185] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 943.312185] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52981b5a-0f24-7eb3-2930-299f2f2a4284" [ 943.312185] env[69982]: _type = "Task" [ 943.312185] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.321960] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52981b5a-0f24-7eb3-2930-299f2f2a4284, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.362594] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864889, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.573763] env[69982]: DEBUG nova.compute.manager [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Received event network-vif-plugged-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.573956] env[69982]: DEBUG oslo_concurrency.lockutils [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] Acquiring lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.574081] env[69982]: DEBUG oslo_concurrency.lockutils [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.574254] env[69982]: DEBUG oslo_concurrency.lockutils [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.574418] env[69982]: DEBUG nova.compute.manager [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] No waiting events found dispatching 
network-vif-plugged-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 943.574576] env[69982]: WARNING nova.compute.manager [req-529beb79-69e6-46a0-bc14-0b43b8fac000 req-d0418b62-ce69-4c6c-be31-69dc338c0b5a service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Received unexpected event network-vif-plugged-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f for instance with vm_state building and task_state spawning. [ 943.655610] env[69982]: DEBUG nova.network.neutron [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.705179] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Took 50.17 seconds to build instance. [ 943.706703] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea67e9cd-5cfc-441d-8921-486fc94b55f2 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a61e3d25-9064-4f18-b7f1-0045b705571a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.113s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.735085] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9ffecac-2cd5-4cf9-b2f8-849333967e2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.753025] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30507bb6-ccab-4b0b-9a57-5c19cb6c2b41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.805048] env[69982]: DEBUG nova.compute.manager [req-a4d08b8e-5893-471e-b592-21f272734834 req-8264ee7a-d473-4627-8654-75047ea2da8d service nova] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Detach interface failed, port_id=5d41e03b-3fd8-4389-a457-2000cf628f86, reason: Instance 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 943.822983] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52981b5a-0f24-7eb3-2930-299f2f2a4284, 'name': SearchDatastore_Task, 'duration_secs': 0.012422} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.823813] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-853a36de-6884-403b-acce-058f0b550a94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.830093] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 943.830093] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b336-01bd-a048-9ed2-3c0137450615" [ 943.830093] env[69982]: _type = "Task" [ 943.830093] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.838898] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b336-01bd-a048-9ed2-3c0137450615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.858299] env[69982]: DEBUG oslo_vmware.api [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3864889, 'name': RemoveSnapshot_Task, 'duration_secs': 1.016449} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.858692] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 943.858953] env[69982]: INFO nova.compute.manager [None req-1732063c-95ef-4202-98ca-c04841687b1b tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Took 18.21 seconds to snapshot the instance on the hypervisor. [ 943.949880] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Successfully updated port: ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 944.158646] env[69982]: INFO nova.compute.manager [-] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Took 1.35 seconds to deallocate network for instance. 
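The interleaved "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" records (per-instance build locks, the *-events locks, compute_resources, refresh_cache-<uuid>) are produced by oslo.concurrency's named locks, whose wrapper logs the wait and hold times around each critical section. A small, hedged illustration of that API, with lock names and function bodies invented for the example rather than copied from Nova:

# Hedged illustration of the oslo.concurrency locking behind the
# Acquiring/acquired/released records; names and bodies are placeholders.
from oslo_concurrency import lockutils

# Decorator form: serialize every caller on one named lock, analogous to
# the "compute_resources" lock held by the resource tracker in this log.
@lockutils.synchronized('compute_resources')
def claim_resources(tracker, instance):
    tracker.claim(instance)  # placeholder body

# Context-manager form: a per-instance lock name, analogous to the
# "refresh_cache-<uuid>" locks taken around network info cache refreshes.
def refresh_network_cache(instance_uuid, refresh):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        refresh(instance_uuid)  # placeholder body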
[ 944.209482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.164s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.210634] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.210803] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.210983] env[69982]: DEBUG nova.network.neutron [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 944.341017] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b336-01bd-a048-9ed2-3c0137450615, 'name': SearchDatastore_Task, 'duration_secs': 0.019794} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.343665] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.343934] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] bba6f430-5af5-4d8a-9cf4-082207c170a5/bba6f430-5af5-4d8a-9cf4-082207c170a5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 944.344405] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dd290c9-87ab-4935-af70-2ed19141b1b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.351572] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 944.351572] env[69982]: value = "task-3864891" [ 944.351572] env[69982]: _type = "Task" [ 944.351572] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.361201] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.455194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.455411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.455677] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 944.523606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.524069] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.524158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.524350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.524518] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.526960] env[69982]: INFO 
nova.compute.manager [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Terminating instance [ 944.654691] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d70445-8730-4735-9900-f4045e75b5dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.662857] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702b9324-aa8c-4b8b-bb17-76998d29bb4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.668760] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.696629] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f69af2d-31d1-4bb4-ba89-1b077d5898db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.705366] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f14e588-4077-457b-b752-d1cd60e15a4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.724684] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 944.727908] env[69982]: DEBUG nova.compute.provider_tree [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 944.862868] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864891, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.000964] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 945.018243] env[69982]: DEBUG nova.network.neutron [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.030824] env[69982]: DEBUG nova.compute.manager [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.031078] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.032784] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3463411f-5931-43f3-badf-fa7e08b1d996 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.044774] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.045153] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f21c65e9-d843-4d33-8129-328e9d434254 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.052942] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 945.052942] env[69982]: value = "task-3864892" [ 945.052942] env[69982]: _type = "Task" [ 945.052942] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.063605] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864892, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.095819] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.096042] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.096216] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.096502] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ba416d4-b693-42ad-b09f-271f9a4cf207 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.107776] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 945.107776] env[69982]: value = "task-3864893" [ 945.107776] env[69982]: _type = "Task" [ 945.107776] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.117742] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864893, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.210770] env[69982]: DEBUG nova.network.neutron [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Updating instance_info_cache with network_info: [{"id": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "address": "fa:16:3e:78:01:05", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae227fd2-20", "ovs_interfaceid": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.236026] env[69982]: DEBUG nova.scheduler.client.report [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.259058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.369023] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864891, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795372} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.369023] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] bba6f430-5af5-4d8a-9cf4-082207c170a5/bba6f430-5af5-4d8a-9cf4-082207c170a5.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 945.369023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 945.369023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-948f07fe-ed67-444a-a9de-5311befa5c22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.374731] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 945.374731] env[69982]: value = "task-3864894" [ 945.374731] env[69982]: _type = "Task" [ 945.374731] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.384805] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.520755] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.563746] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864892, 'name': PowerOffVM_Task, 'duration_secs': 0.337885} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.564013] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.564200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.564458] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99f27847-25af-4206-b7f5-53a4faab10ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.604022] env[69982]: DEBUG nova.compute.manager [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Received event network-changed-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.604245] env[69982]: DEBUG nova.compute.manager [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Refreshing instance network info cache due to event network-changed-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 945.604535] env[69982]: DEBUG oslo_concurrency.lockutils [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] Acquiring lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.618989] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864893, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167927} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.620615] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.620615] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.620797] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.631165] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 945.631414] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.631628] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleting the datastore file [datastore2] a4064177-051b-4ec8-a1fc-fa5d299add8b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.631875] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-56d17b4d-06ec-4615-822c-ffd938071da8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.641361] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for the task: (returnval){ [ 945.641361] env[69982]: value = "task-3864896" [ 945.641361] env[69982]: _type = "Task" [ 945.641361] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.650249] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864896, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.714229] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.714903] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Instance network_info: |[{"id": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "address": "fa:16:3e:78:01:05", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae227fd2-20", "ovs_interfaceid": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 945.715124] env[69982]: DEBUG oslo_concurrency.lockutils [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] Acquired lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.715298] env[69982]: DEBUG nova.network.neutron [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Refreshing network info cache for port ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 945.717089] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:78:01:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 945.727052] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 
tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.728410] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 945.728771] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f5e1e98-c6e8-44ad-9964-4abaf582c7cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.746092] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.746611] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.749467] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.277s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.750863] env[69982]: INFO nova.compute.claims [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 945.760129] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 945.760129] env[69982]: value = "task-3864897" [ 945.760129] env[69982]: _type = "Task" [ 945.760129] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.769142] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864897, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.885829] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07199} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.886143] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 945.887037] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aad2f6a-2355-4b88-8b8a-495191b0fc3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.912164] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] bba6f430-5af5-4d8a-9cf4-082207c170a5/bba6f430-5af5-4d8a-9cf4-082207c170a5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.912509] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb7336b8-a1d0-4a30-84b3-c05610bd036a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.936348] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 945.936348] env[69982]: value = "task-3864898" [ 945.936348] env[69982]: _type = "Task" [ 945.936348] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.947865] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864898, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.043988] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1fc7e8-b1fb-4969-a8c1-4609e1142787 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.065180] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f2271a-c417-4e0a-8f81-53ed7da3e16c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.073851] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 946.154598] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.256665] env[69982]: DEBUG nova.compute.utils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.260721] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.260893] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.278771] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864897, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.334946] env[69982]: DEBUG nova.policy [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c84472005ef43d99658fa6f5cf59bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07f7b975ecb449a290e2ae6582e07016', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.448863] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.518922] env[69982]: DEBUG nova.network.neutron [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Updated VIF entry in instance network info cache for port ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 946.519509] env[69982]: DEBUG nova.network.neutron [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Updating instance_info_cache with network_info: [{"id": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "address": "fa:16:3e:78:01:05", "network": {"id": "e10d434b-963b-4921-9062-e962c99f8727", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1142282702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "38945e679e1c4550b82ada82b9b1b7ec", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae227fd2-20", "ovs_interfaceid": "ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.583272] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.583272] env[69982]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-629f9047-a34f-4ac1-b593-d0f51929e606 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.589168] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 946.589168] env[69982]: value = "task-3864899" [ 946.589168] env[69982]: _type = "Task" [ 946.589168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.601388] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.654312] env[69982]: DEBUG oslo_vmware.api [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Task: {'id': task-3864896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.52665} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.654615] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.654911] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.655049] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.655190] env[69982]: INFO nova.compute.manager [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Took 1.62 seconds to destroy the instance on the hypervisor. [ 946.655452] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.655722] env[69982]: DEBUG nova.compute.manager [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.655813] env[69982]: DEBUG nova.network.neutron [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 946.667383] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 946.667628] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.667784] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 946.667989] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.668324] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 946.668508] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 946.668728] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 946.668901] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 946.669069] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 946.669332] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 946.669516] env[69982]: DEBUG nova.virt.hardware [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 946.670799] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf42d63-5c2c-4978-918b-cde996207df9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.680140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d438516e-b949-4deb-ae5a-300b0158f56e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.697100] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:e1:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e9d6238-fb82-48aa-8702-091435aae1b1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.704735] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.708312] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Successfully created port: 18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.710421] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.710733] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e37a59b2-fa86-456f-a4a2-f8ea7bb127f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.735018] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.735018] env[69982]: value = "task-3864900" [ 946.735018] env[69982]: _type = "Task" [ 946.735018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.745998] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864900, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.762543] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.786891] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864897, 'name': CreateVM_Task, 'duration_secs': 0.613982} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.787121] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 946.787886] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.788067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.788469] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 946.788768] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20906bbc-1416-4c74-b5b9-5c64395019ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.794579] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 946.794579] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5e132-705c-7977-a40c-ae6c84aa3857" [ 946.794579] env[69982]: _type = "Task" [ 946.794579] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.807688] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5e132-705c-7977-a40c-ae6c84aa3857, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.947660] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864898, 'name': ReconfigVM_Task, 'duration_secs': 0.719288} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.950685] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Reconfigured VM instance instance-00000047 to attach disk [datastore2] bba6f430-5af5-4d8a-9cf4-082207c170a5/bba6f430-5af5-4d8a-9cf4-082207c170a5.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.952793] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81c2a3d5-dbb6-4896-9dc5-03b3427a81b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.961232] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 946.961232] env[69982]: value = "task-3864901" [ 946.961232] env[69982]: _type = "Task" [ 946.961232] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.973776] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864901, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.025664] env[69982]: DEBUG oslo_concurrency.lockutils [req-75dcb688-37cb-42a5-b332-b077c2ca04e9 req-4239f4b3-8ac6-4dc2-9ebd-fccfcf6b2fed service nova] Releasing lock "refresh_cache-a9a14fa8-7f58-48f9-994d-b5063833a81b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.106416] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864899, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.249755] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864900, 'name': CreateVM_Task, 'duration_secs': 0.383845} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.249937] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.250746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.250907] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.251317] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 947.251589] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49b9d86d-0b94-43d8-9d1b-c52474b3343a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.258916] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 947.258916] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228988a-18d1-8339-04ba-32151215a632" [ 947.258916] env[69982]: _type = "Task" [ 947.258916] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.277543] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228988a-18d1-8339-04ba-32151215a632, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.306614] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5e132-705c-7977-a40c-ae6c84aa3857, 'name': SearchDatastore_Task, 'duration_secs': 0.012845} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.307783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.307783] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.307783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.307783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.308066] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.308066] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55941182-b7e8-457d-a803-09812e5fd0b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.322354] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.322552] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.323408] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51bfcb43-def6-402c-8058-f6f900e23cf8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.336175] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 947.336175] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5264896e-708c-9508-db57-037b09028614" [ 947.336175] env[69982]: _type = "Task" [ 947.336175] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.343710] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5264896e-708c-9508-db57-037b09028614, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.379348] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acab5911-e335-4f61-b9e8-0df4fc84e33d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.389295] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6921f5-7113-4cfb-a63b-854cb445ea42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.393667] env[69982]: DEBUG nova.compute.manager [req-518e29c0-2a36-4f0f-bc3b-f0c78fb3e336 req-d22af330-547e-4a28-aec3-d573f8a49bb0 service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Received event network-vif-deleted-e56d9c29-71a4-4d37-b444-7c544cd50695 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.393855] env[69982]: INFO nova.compute.manager [req-518e29c0-2a36-4f0f-bc3b-f0c78fb3e336 req-d22af330-547e-4a28-aec3-d573f8a49bb0 service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Neutron deleted interface e56d9c29-71a4-4d37-b444-7c544cd50695; detaching it from the instance and deleting it from the info cache [ 947.394020] env[69982]: DEBUG nova.network.neutron [req-518e29c0-2a36-4f0f-bc3b-f0c78fb3e336 req-d22af330-547e-4a28-aec3-d573f8a49bb0 service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.424995] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893dc884-92fb-43be-9647-7e18c42620f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.434238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0adfaa7-c37c-4376-b736-dab87155255d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.449326] env[69982]: DEBUG nova.compute.provider_tree [None 
req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.471728] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864901, 'name': Rename_Task, 'duration_secs': 0.205882} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.471728] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 947.471728] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b4a9a96-1df3-440e-8d38-07a56fafc589 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.477785] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 947.477785] env[69982]: value = "task-3864902" [ 947.477785] env[69982]: _type = "Task" [ 947.477785] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.486952] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.553619] env[69982]: DEBUG nova.network.neutron [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.607371] env[69982]: DEBUG oslo_vmware.api [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3864899, 'name': PowerOnVM_Task, 'duration_secs': 0.554779} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.607371] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.607371] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec1e38dd-e6b8-45a7-b0a2-889619bb882b tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance '9b733e1e-0532-4d91-a460-6b1f1971f388' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 947.769523] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228988a-18d1-8339-04ba-32151215a632, 'name': SearchDatastore_Task, 'duration_secs': 0.011878} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.769836] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.770080] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 947.770323] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.770475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.770654] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 947.770915] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e370795c-dc0c-4047-ab3f-25ab1ebcea85 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.782341] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.785587] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 947.785766] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 947.786516] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3034754f-9454-4ab8-8afc-c4bd9552cc54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.792173] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 947.792173] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526b72c5-07ce-9ce0-af2a-5f3f0177fe51" [ 947.792173] env[69982]: _type = "Task" [ 947.792173] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.803876] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526b72c5-07ce-9ce0-af2a-5f3f0177fe51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.806908] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 947.807164] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 947.807323] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 947.807503] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 947.807644] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 947.807791] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 947.807996] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 947.808172] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 947.808346] env[69982]: 
DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 947.808508] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 947.808676] env[69982]: DEBUG nova.virt.hardware [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 947.809532] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f024c35e-756c-4a9c-98ee-b67e08603cb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.817971] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ce25c1-2155-4858-9869-941230d57687 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.845186] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5264896e-708c-9508-db57-037b09028614, 'name': SearchDatastore_Task, 'duration_secs': 0.028491} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.846114] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-344e0a87-5bb7-4bef-b910-1db566c729bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.852021] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 947.852021] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5b1ae-7970-66c6-dffe-1661fcffabf9" [ 947.852021] env[69982]: _type = "Task" [ 947.852021] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.857229] env[69982]: DEBUG nova.compute.manager [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.858094] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ed5717-b09d-44df-befc-d62151a991a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.863879] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5b1ae-7970-66c6-dffe-1661fcffabf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.897347] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc34d71b-be6d-46b3-878b-5fa0174c01ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.907721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b1f689-8bea-4359-89e8-e423694a9032 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.947992] env[69982]: DEBUG nova.compute.manager [req-518e29c0-2a36-4f0f-bc3b-f0c78fb3e336 req-d22af330-547e-4a28-aec3-d573f8a49bb0 service nova] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Detach interface failed, port_id=e56d9c29-71a4-4d37-b444-7c544cd50695, reason: Instance a4064177-051b-4ec8-a1fc-fa5d299add8b could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.952374] env[69982]: DEBUG nova.scheduler.client.report [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.988753] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864902, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.056762] env[69982]: INFO nova.compute.manager [-] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Took 1.40 seconds to deallocate network for instance. 
[ 948.245342] env[69982]: DEBUG nova.compute.manager [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Received event network-vif-plugged-18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 948.245342] env[69982]: DEBUG oslo_concurrency.lockutils [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.245342] env[69982]: DEBUG oslo_concurrency.lockutils [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.245342] env[69982]: DEBUG oslo_concurrency.lockutils [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.245342] env[69982]: DEBUG nova.compute.manager [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] No waiting events found dispatching network-vif-plugged-18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 948.246991] env[69982]: WARNING nova.compute.manager [req-3811a572-36bd-4425-bca9-56a197e9fe80 req-34d473fd-687c-43b3-8160-8c34000acfd0 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Received unexpected event network-vif-plugged-18a2c6c8-1313-42eb-a058-40e272e7fda3 for instance with vm_state building and task_state spawning. [ 948.303316] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526b72c5-07ce-9ce0-af2a-5f3f0177fe51, 'name': SearchDatastore_Task, 'duration_secs': 0.015261} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.304227] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23103302-b2e4-4a5d-9a21-ed8188593d58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.310505] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 948.310505] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e5edd-60c8-f5fc-066f-1b5300f9569b" [ 948.310505] env[69982]: _type = "Task" [ 948.310505] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.319595] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e5edd-60c8-f5fc-066f-1b5300f9569b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.351365] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Successfully updated port: 18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.364217] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b5b1ae-7970-66c6-dffe-1661fcffabf9, 'name': SearchDatastore_Task, 'duration_secs': 0.011429} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.364654] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.364999] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a9a14fa8-7f58-48f9-994d-b5063833a81b/a9a14fa8-7f58-48f9-994d-b5063833a81b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.365509] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e66b75c-7f91-4866-bad1-06580b39c1c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.371039] env[69982]: INFO nova.compute.manager [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] instance snapshotting [ 948.371667] env[69982]: DEBUG nova.objects.instance [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.375723] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 948.375723] env[69982]: value = 
"task-3864903" [ 948.375723] env[69982]: _type = "Task" [ 948.375723] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.388422] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864903, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.457665] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.457910] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 948.461075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.857s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.462762] env[69982]: INFO nova.compute.claims [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 948.489421] env[69982]: DEBUG oslo_vmware.api [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864902, 'name': PowerOnVM_Task, 'duration_secs': 0.956948} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.489813] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 948.489813] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Took 10.60 seconds to spawn the instance on the hypervisor. 
[ 948.490031] env[69982]: DEBUG nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 948.490785] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c92434-e9a0-41ec-bb3b-5c9973ca653e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.563754] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.826082] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526e5edd-60c8-f5fc-066f-1b5300f9569b, 'name': SearchDatastore_Task, 'duration_secs': 0.020759} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.826082] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.826082] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 948.826082] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76cea0e5-d094-4171-86a5-fa52835b5900 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.836019] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 948.836019] env[69982]: value = "task-3864904" [ 948.836019] env[69982]: _type = "Task" [ 948.836019] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.844272] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864904, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.858481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.858631] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.858789] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 948.881379] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184ee876-b848-4d85-9f5f-30d3cf3fda3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.890706] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864903, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.906116] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0b07bc-3927-40ea-b75a-383cbb6480fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.968408] env[69982]: DEBUG nova.compute.utils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 948.972489] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 948.972761] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.010336] env[69982]: INFO nova.compute.manager [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Took 52.13 seconds to build instance. 
[ 949.025370] env[69982]: DEBUG nova.policy [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '005bbd5e6a314bf48f443ddc050f0a75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18dcc70df5e144e3b4f0592b0112aaf7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.348084] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864904, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.389051] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769699} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.389856] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a9a14fa8-7f58-48f9-994d-b5063833a81b/a9a14fa8-7f58-48f9-994d-b5063833a81b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.389856] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.390175] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-70e41c9a-9f29-457a-b46d-21513a0f35e2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.400087] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 949.400087] env[69982]: value = "task-3864905" [ 949.400087] env[69982]: _type = "Task" [ 949.400087] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.409315] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 949.415525] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.419568] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 949.420082] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-de95833c-f205-437b-87c6-ac54b6a90135 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.431864] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 949.431864] env[69982]: value = "task-3864906" [ 949.431864] env[69982]: _type = "Task" [ 949.431864] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.445069] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864906, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.480921] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 949.515578] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f75e2271-f4e7-464a-8740-0e9193db01ba tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.436s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.556780] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Successfully created port: 46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.651242] env[69982]: DEBUG nova.network.neutron [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating instance_info_cache with network_info: [{"id": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "address": "fa:16:3e:38:fc:9e", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a2c6c8-13", "ovs_interfaceid": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.851792] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864904, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72465} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.851792] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 949.851792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 949.852111] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-142acf61-cefd-46ef-8edd-147bd8e642b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.860474] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 949.860474] env[69982]: value = "task-3864907" [ 949.860474] env[69982]: _type = "Task" [ 949.860474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.875639] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864907, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.921972] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091373} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.922844] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 949.924735] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7acdb7-81ae-4f73-91b3-03d6ff39e018 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.955782] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] a9a14fa8-7f58-48f9-994d-b5063833a81b/a9a14fa8-7f58-48f9-994d-b5063833a81b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 949.963132] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f634612f-b5c0-45c0-9dc8-62fdb8005e40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.986698] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864906, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.989348] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 949.989348] env[69982]: value = "task-3864908" [ 949.989348] env[69982]: _type = "Task" [ 949.989348] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.005895] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.128655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63b9662-1661-4cfe-8057-623c15fc071f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.138362] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd212d2a-2a3c-4b03-b6b3-144aa30fef66 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.173770] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.174203] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance network_info: |[{"id": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "address": "fa:16:3e:38:fc:9e", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a2c6c8-13", "ovs_interfaceid": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 950.174979] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:fc:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18a2c6c8-1313-42eb-a058-40e272e7fda3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.183112] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.183937] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011fde54-3bc3-46ea-8bf7-92d49e3cabf7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.186996] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.187279] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8eb1a62-ae25-4d9b-b9ec-97942cdd066a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.209082] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1edd166-cd55-4c41-b4af-87d38b25c497 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.214653] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.214653] env[69982]: value = "task-3864909" [ 950.214653] env[69982]: _type = "Task" [ 950.214653] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.227054] env[69982]: DEBUG nova.compute.provider_tree [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.234265] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864909, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.278377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "10a4294b-39ce-4643-98b5-71ac283f05f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.278743] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.279037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.279244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.279417] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.282503] env[69982]: DEBUG nova.compute.manager [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Received event network-changed-18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.282722] env[69982]: DEBUG nova.compute.manager [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Refreshing instance network info cache due to event network-changed-18a2c6c8-1313-42eb-a058-40e272e7fda3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 950.283356] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] Acquiring lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.283520] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] Acquired lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.283697] env[69982]: DEBUG nova.network.neutron [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Refreshing network info cache for port 18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 950.285046] env[69982]: INFO nova.compute.manager [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Terminating instance [ 950.374691] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078968} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.375039] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.376091] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed14427-c475-4172-a685-33c8a87de11a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.411737] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.412197] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4a8e580-83b1-4b6c-b89b-3dd014ef35c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.429322] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.429461] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.429608] env[69982]: DEBUG nova.compute.manager [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Going to confirm migration 3 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 950.439031] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 950.439031] env[69982]: value = "task-3864910" [ 950.439031] env[69982]: _type = "Task" [ 950.439031] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.450162] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864910, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.459820] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864906, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.501367] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 950.504488] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.528659] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 950.528909] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 950.529221] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 950.529475] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 950.529694] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 950.529896] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 950.530122] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 950.530286] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 950.530458] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 950.530618] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 950.530791] env[69982]: DEBUG nova.virt.hardware [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 950.532153] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b38cbe-9add-4371-be20-d36369992ce4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.543630] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94ac1ae-3982-4313-a247-1a48b6e770b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.727764] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864909, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.731067] env[69982]: DEBUG nova.scheduler.client.report [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.792558] env[69982]: DEBUG nova.compute.manager [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 950.792988] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 950.794455] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cdb201-84f3-4981-b0ba-93f3fbca6a06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.806623] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 950.807028] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5d267b0-e847-424d-be9b-d7c304912637 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.814562] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 950.814562] env[69982]: value = "task-3864911" [ 950.814562] env[69982]: _type = "Task" [ 950.814562] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.824421] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.954095] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864910, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.965852] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864906, 'name': CreateSnapshot_Task, 'duration_secs': 1.365096} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.966265] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 950.967184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b57268-29ce-43e6-9222-5fd4bbafb13f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.008024] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864908, 'name': ReconfigVM_Task, 'duration_secs': 0.927997} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.008024] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Reconfigured VM instance instance-00000048 to attach disk [datastore2] a9a14fa8-7f58-48f9-994d-b5063833a81b/a9a14fa8-7f58-48f9-994d-b5063833a81b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.008024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cac44261-c6b9-4566-8133-402cda1fd48a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.017778] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 951.017778] env[69982]: value = "task-3864912" [ 951.017778] env[69982]: _type = "Task" [ 951.017778] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.033031] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864912, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.043229] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.043542] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.043764] env[69982]: DEBUG nova.network.neutron [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 951.044067] env[69982]: DEBUG nova.objects.instance [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'info_cache' on Instance uuid 9b733e1e-0532-4d91-a460-6b1f1971f388 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.229449] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864909, 'name': CreateVM_Task, 'duration_secs': 0.664636} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.233394] env[69982]: DEBUG nova.network.neutron [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updated VIF entry in instance network info cache for port 18a2c6c8-1313-42eb-a058-40e272e7fda3. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 951.236045] env[69982]: DEBUG nova.network.neutron [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating instance_info_cache with network_info: [{"id": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "address": "fa:16:3e:38:fc:9e", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a2c6c8-13", "ovs_interfaceid": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.236045] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.240535] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.241218] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 951.245199] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.246025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.246025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 951.246358] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.464s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.246358] env[69982]: DEBUG nova.objects.instance [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lazy-loading 'resources' on Instance uuid a3e3106d-b7df-49c8-9341-a843977aefe4 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 951.247875] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d25aba8-6900-4015-9189-4a6a7d8ab74d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.261648] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 951.261648] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ac6722-f3f4-4ed1-ae6b-2548f2c2b01b" [ 951.261648] env[69982]: _type = "Task" [ 951.261648] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.274901] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ac6722-f3f4-4ed1-ae6b-2548f2c2b01b, 'name': SearchDatastore_Task, 'duration_secs': 0.011463} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.275187] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.275432] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.275662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.275809] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.275990] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.276334] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-190a376f-7057-4bd0-b19d-97c5464caf4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.289954] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.290206] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.291386] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fa2aa5b-31e9-4f36-a20b-d9029e24aef7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.298711] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 951.298711] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d890f5-c41a-89a8-ba0c-5a261dec486b" [ 951.298711] env[69982]: _type = "Task" [ 951.298711] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.308767] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d890f5-c41a-89a8-ba0c-5a261dec486b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.325824] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864911, 'name': PowerOffVM_Task, 'duration_secs': 0.289041} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.326114] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 951.326285] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 951.326540] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aad594d6-a356-4986-9e13-deb42c1e913a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.386771] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Successfully updated port: 46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 951.390623] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.391032] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.391316] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleting the datastore file [datastore1] 10a4294b-39ce-4643-98b5-71ac283f05f5 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.391604] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a50c5b6-b55e-4370-aadc-47b10ccf01a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.399095] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 951.399095] env[69982]: value = "task-3864914" [ 951.399095] env[69982]: _type = "Task" [ 951.399095] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.409563] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864914, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.452103] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864910, 'name': ReconfigVM_Task, 'duration_secs': 0.819677} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.452103] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.452501] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7636b863-7bec-4201-b674-2aaaad66546e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.460020] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 951.460020] env[69982]: value = "task-3864915" [ 951.460020] env[69982]: _type = "Task" [ 951.460020] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.469319] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864915, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.495744] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 951.496113] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2fbfa84d-4d01-4e71-af41-99bb53d44613 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.505538] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 951.505538] env[69982]: value = "task-3864916" [ 951.505538] env[69982]: _type = "Task" [ 951.505538] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.515156] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.529949] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864912, 'name': Rename_Task, 'duration_secs': 0.198596} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.530246] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.530505] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b78f2399-fae6-4248-a4e8-d3819b222b08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.537493] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 951.537493] env[69982]: value = "task-3864917" [ 951.537493] env[69982]: _type = "Task" [ 951.537493] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.549527] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.745486] env[69982]: DEBUG oslo_concurrency.lockutils [req-ba9f0e20-c92b-4adf-8a4c-1addcba76e9f req-fc5c7522-3fa0-4c6f-b3c2-456fe30e4c48 service nova] Releasing lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.749572] env[69982]: DEBUG nova.compute.utils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 951.751405] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 951.751537] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 951.812870] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d890f5-c41a-89a8-ba0c-5a261dec486b, 'name': SearchDatastore_Task, 'duration_secs': 0.011972} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.813473] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f610dbf-0596-4fbf-8514-8b8f421ce5c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.822168] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 951.822168] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228219f-8d95-c612-400d-7db87e7b6a24" [ 951.822168] env[69982]: _type = "Task" [ 951.822168] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.827389] env[69982]: DEBUG nova.policy [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '361bff09a25e4b5ab6a071a458858131', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afaf89b0250d46048813da25c754e1a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 951.834464] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228219f-8d95-c612-400d-7db87e7b6a24, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.890487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.890487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.890487] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 951.918023] env[69982]: DEBUG oslo_vmware.api [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3864914, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304269} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.921900] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.921900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.922281] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.922536] env[69982]: INFO nova.compute.manager [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 951.922946] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.924132] env[69982]: DEBUG nova.compute.manager [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.924132] env[69982]: DEBUG nova.network.neutron [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 951.973864] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864915, 'name': Rename_Task, 'duration_secs': 0.149947} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.974199] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 951.974473] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f097167-e2f0-4750-9e41-510a16bf836e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.985093] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 951.985093] env[69982]: value = "task-3864918" [ 951.985093] env[69982]: _type = "Task" [ 951.985093] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.000901] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864918, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.026865] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.057717] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864917, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.239245] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Successfully created port: a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 952.257996] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 952.334675] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228219f-8d95-c612-400d-7db87e7b6a24, 'name': SearchDatastore_Task, 'duration_secs': 0.019779} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.338401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.338401] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.338898] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-457be611-a675-4b6f-8983-d55e9ea10dfb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.347688] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 952.347688] env[69982]: value = "task-3864919" [ 952.347688] env[69982]: _type = "Task" [ 952.347688] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.356349] env[69982]: DEBUG nova.compute.manager [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Received event network-vif-plugged-46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.356573] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Acquiring lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.356779] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.356945] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.357119] env[69982]: DEBUG nova.compute.manager [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 
5b79fc38-ace3-4f94-8d1c-b77912f44a1d] No waiting events found dispatching network-vif-plugged-46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 952.357284] env[69982]: WARNING nova.compute.manager [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Received unexpected event network-vif-plugged-46457931-d8c0-40b0-864b-9a4b8489c524 for instance with vm_state building and task_state spawning. [ 952.357444] env[69982]: DEBUG nova.compute.manager [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Received event network-changed-46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.357594] env[69982]: DEBUG nova.compute.manager [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Refreshing instance network info cache due to event network-changed-46457931-d8c0-40b0-864b-9a4b8489c524. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 952.357757] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Acquiring lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.364290] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.405025] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0071706e-0d12-4107-b820-ce79bcc8f56c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.416013] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d66705-22d7-4e5c-aa20-b355ab3baf84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.456184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c3743e-799c-40ce-bb75-1cd7c8610045 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.459642] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 952.468084] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0720d9c-da70-46fa-a46b-6a1862bf204a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.488134] env[69982]: DEBUG nova.compute.provider_tree [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.502220] env[69982]: DEBUG oslo_vmware.api [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864918, 'name': PowerOnVM_Task, 'duration_secs': 0.518484} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.503370] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.503569] env[69982]: DEBUG nova.compute.manager [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.505071] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c884045-b070-4462-b8ce-939ccac139ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.526336] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.547680] env[69982]: DEBUG nova.network.neutron [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [{"id": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "address": "fa:16:3e:de:e7:35", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9356a59-fa", "ovs_interfaceid": "f9356a59-fa6f-4664-b5ff-4a2609f506c3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.553339] env[69982]: DEBUG oslo_vmware.api [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864917, 'name': PowerOnVM_Task, 'duration_secs': 0.77375} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.553339] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.553535] env[69982]: INFO nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Took 9.61 seconds to spawn the instance on the hypervisor. 
[ 952.553535] env[69982]: DEBUG nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 952.554373] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1d82d1-c4a1-4c15-be69-a27c0480acd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.559891] env[69982]: DEBUG nova.compute.manager [req-e5ad9da5-c497-4f42-8fa5-9672b73b25d9 req-ea8b96cf-d1e0-472b-b5f4-f4fece5ee000 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Received event network-vif-deleted-5c467e3e-6a47-4c32-b107-2c1b776e27ea {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.560428] env[69982]: INFO nova.compute.manager [req-e5ad9da5-c497-4f42-8fa5-9672b73b25d9 req-ea8b96cf-d1e0-472b-b5f4-f4fece5ee000 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Neutron deleted interface 5c467e3e-6a47-4c32-b107-2c1b776e27ea; detaching it from the instance and deleting it from the info cache [ 952.560848] env[69982]: DEBUG nova.network.neutron [req-e5ad9da5-c497-4f42-8fa5-9672b73b25d9 req-ea8b96cf-d1e0-472b-b5f4-f4fece5ee000 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.718847] env[69982]: DEBUG nova.network.neutron [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Updating instance_info_cache with network_info: [{"id": "46457931-d8c0-40b0-864b-9a4b8489c524", "address": "fa:16:3e:19:7b:85", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46457931-d8", "ovs_interfaceid": "46457931-d8c0-40b0-864b-9a4b8489c524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.860640] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864919, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.889831] env[69982]: DEBUG nova.network.neutron [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.996787] env[69982]: DEBUG nova.scheduler.client.report [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.024455] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.027054] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.054683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-9b733e1e-0532-4d91-a460-6b1f1971f388" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.054989] env[69982]: DEBUG nova.objects.instance [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'migration_context' on Instance uuid 9b733e1e-0532-4d91-a460-6b1f1971f388 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.066154] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5209d116-a562-4252-8df0-882f73d33dc9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.078170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a87181-8ad2-429f-b040-a1d7dc1a9ea0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.102404] env[69982]: INFO nova.compute.manager [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Took 49.27 seconds to build instance. 
[ 953.133312] env[69982]: DEBUG nova.compute.manager [req-e5ad9da5-c497-4f42-8fa5-9672b73b25d9 req-ea8b96cf-d1e0-472b-b5f4-f4fece5ee000 service nova] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Detach interface failed, port_id=5c467e3e-6a47-4c32-b107-2c1b776e27ea, reason: Instance 10a4294b-39ce-4643-98b5-71ac283f05f5 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 953.221988] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.222437] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Instance network_info: |[{"id": "46457931-d8c0-40b0-864b-9a4b8489c524", "address": "fa:16:3e:19:7b:85", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46457931-d8", "ovs_interfaceid": "46457931-d8c0-40b0-864b-9a4b8489c524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 953.222753] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Acquired lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.222935] env[69982]: DEBUG nova.network.neutron [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Refreshing network info cache for port 46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 953.224339] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:7b:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '767a3a48-41d4-4a0c-961d-0024837f63bd', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46457931-d8c0-40b0-864b-9a4b8489c524', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 953.237061] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 953.241634] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 953.241634] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-613e5999-227e-470e-898d-1db6345e30f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.264480] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 953.264480] env[69982]: value = "task-3864920" [ 953.264480] env[69982]: _type = "Task" [ 953.264480] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.268458] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 953.277569] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864920, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.293752] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 953.294031] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 953.294573] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 953.294573] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 953.295630] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 953.295630] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 953.295630] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 953.295630] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
953.295630] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 953.295844] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 953.295954] env[69982]: DEBUG nova.virt.hardware [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 953.296898] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a434616-50a8-4192-83b6-11887cffd623 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.308787] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a000dad-665b-4cc2-b975-0fad9cf6eda4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.361421] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864919, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.779191} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.364576] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.364948] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.365249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c9bfe58e-ff32-4bea-8e05-b8f973b5d69e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.373859] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 953.373859] env[69982]: value = "task-3864921" [ 953.373859] env[69982]: _type = "Task" [ 953.373859] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.385295] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864921, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.393793] env[69982]: INFO nova.compute.manager [-] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Took 1.47 seconds to deallocate network for instance. [ 953.502976] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.257s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.505582] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.161s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.507374] env[69982]: INFO nova.compute.claims [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 953.512094] env[69982]: DEBUG nova.network.neutron [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Updated VIF entry in instance network info cache for port 46457931-d8c0-40b0-864b-9a4b8489c524. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 953.513124] env[69982]: DEBUG nova.network.neutron [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Updating instance_info_cache with network_info: [{"id": "46457931-d8c0-40b0-864b-9a4b8489c524", "address": "fa:16:3e:19:7b:85", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46457931-d8", "ovs_interfaceid": "46457931-d8c0-40b0-864b-9a4b8489c524", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.524869] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.536648] env[69982]: INFO nova.scheduler.client.report [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Deleted allocations for instance a3e3106d-b7df-49c8-9341-a843977aefe4 [ 953.557789] env[69982]: DEBUG nova.objects.base [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Object Instance<9b733e1e-0532-4d91-a460-6b1f1971f388> lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 953.558728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c5730d-528f-4be0-bd31-b72b75b4cff4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.580444] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20277f6-adc6-47ad-b091-8658462f9008 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.588415] env[69982]: DEBUG oslo_vmware.api [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 953.588415] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238055e-e02c-4e56-83ea-916dc2f8c99c" [ 953.588415] env[69982]: _type = "Task" [ 953.588415] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.600930] env[69982]: INFO nova.compute.manager [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Rebuilding instance [ 953.603255] env[69982]: DEBUG oslo_vmware.api [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238055e-e02c-4e56-83ea-916dc2f8c99c, 'name': SearchDatastore_Task, 'duration_secs': 0.00904} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.603605] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.606974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b9d53a5c-7ffa-4ccc-a479-3a0e7f625f83 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.362s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.641571] env[69982]: DEBUG nova.compute.manager [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.642697] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe00c336-ca04-4161-9957-133cae6a85e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.775346] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864920, 'name': CreateVM_Task, 'duration_secs': 0.464764} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.775533] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 953.776255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.776511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.776830] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 953.777758] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad9c37eb-0399-4079-b4e2-d6fd6822d3b1 {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.783223] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 953.783223] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8b666-3e7d-06b7-ce8d-b4bb5d4215aa" [ 953.783223] env[69982]: _type = "Task" [ 953.783223] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.793134] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8b666-3e7d-06b7-ce8d-b4bb5d4215aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.884629] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080659} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.884929] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.885920] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3d0ec7-26c8-45a9-b740-e3a4f2e02056 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.904215] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.912908] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.914058] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06842a8f-7e22-46b4-9e0a-772dfc5f56ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.934648] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 953.934648] env[69982]: value = "task-3864922" [ 953.934648] env[69982]: _type = "Task" [ 953.934648] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.944663] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864922, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.969029] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Successfully updated port: a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 954.017933] env[69982]: DEBUG oslo_concurrency.lockutils [req-42c945b7-eca8-47bb-9bd1-14254d50f05c req-83b29076-d7af-4b4c-9ac7-cf4a3814ce99 service nova] Releasing lock "refresh_cache-5b79fc38-ace3-4f94-8d1c-b77912f44a1d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.033878] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864916, 'name': CloneVM_Task, 'duration_secs': 2.021985} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.035525] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created linked-clone VM from snapshot [ 954.036376] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4efd7ae1-7fcf-4108-9cf9-b6c5c33e31da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.048640] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploading image e6d8946f-e6b4-47f7-a539-ec50ec4ec3aa {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 954.051517] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c56252f7-f4de-4cee-bae6-305cda28b365 tempest-ServerMetadataNegativeTestJSON-371764788 tempest-ServerMetadataNegativeTestJSON-371764788-project-member] Lock "a3e3106d-b7df-49c8-9341-a843977aefe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.872s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.081451] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 954.081451] env[69982]: value = 
"vm-768004" [ 954.081451] env[69982]: _type = "VirtualMachine" [ 954.081451] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 954.081744] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8c24195b-80df-4f83-b5fb-1176fcf941c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.090804] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease: (returnval){ [ 954.090804] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528219ca-5962-a07f-4fc6-002460adbc38" [ 954.090804] env[69982]: _type = "HttpNfcLease" [ 954.090804] env[69982]: } obtained for exporting VM: (result){ [ 954.090804] env[69982]: value = "vm-768004" [ 954.090804] env[69982]: _type = "VirtualMachine" [ 954.090804] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 954.091061] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the lease: (returnval){ [ 954.091061] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528219ca-5962-a07f-4fc6-002460adbc38" [ 954.091061] env[69982]: _type = "HttpNfcLease" [ 954.091061] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 954.099820] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.099820] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528219ca-5962-a07f-4fc6-002460adbc38" [ 954.099820] env[69982]: _type = "HttpNfcLease" [ 954.099820] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 954.298023] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e8b666-3e7d-06b7-ce8d-b4bb5d4215aa, 'name': SearchDatastore_Task, 'duration_secs': 0.02689} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.298023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.298023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.298023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.298361] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.298361] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.298361] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb7aa432-2551-44e7-920c-0cc1dc71475b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.308606] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.309243] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.310174] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b9302be-cd74-41ac-8c04-cdc496a96f8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.317573] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 954.317573] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527724da-2cc7-dc17-3d30-df607ffe5f85" [ 954.317573] env[69982]: _type = "Task" [ 954.317573] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.327970] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527724da-2cc7-dc17-3d30-df607ffe5f85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.391599] env[69982]: DEBUG nova.compute.manager [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Received event network-vif-plugged-a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.392204] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.392772] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.393318] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.393756] env[69982]: DEBUG nova.compute.manager [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] No waiting events found dispatching network-vif-plugged-a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 954.394181] env[69982]: WARNING nova.compute.manager [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Received unexpected event 
network-vif-plugged-a8e19350-a6cb-4da2-a745-4a03db30a50b for instance with vm_state building and task_state spawning. [ 954.394590] env[69982]: DEBUG nova.compute.manager [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Received event network-changed-a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 954.394988] env[69982]: DEBUG nova.compute.manager [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Refreshing instance network info cache due to event network-changed-a8e19350-a6cb-4da2-a745-4a03db30a50b. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 954.396020] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Acquiring lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.396020] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Acquired lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.396020] env[69982]: DEBUG nova.network.neutron [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Refreshing network info cache for port a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 954.448401] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864922, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.474920] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.605587] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 954.605587] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528219ca-5962-a07f-4fc6-002460adbc38" [ 954.605587] env[69982]: _type = "HttpNfcLease" [ 954.605587] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 954.606185] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 954.606185] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528219ca-5962-a07f-4fc6-002460adbc38" [ 954.606185] env[69982]: _type = "HttpNfcLease" [ 954.606185] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 954.607468] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8520a3f-77b6-4405-b7ed-1ecc97ccfe1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.621435] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 954.621754] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 954.687859] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.692024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6361d5f3-d87b-482f-8c0a-40a9854b1049 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.702929] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 954.702929] env[69982]: value = "task-3864924" [ 954.702929] env[69982]: _type = "Task" [ 954.702929] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.717602] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864924, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.773578] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-42ca9d7f-ca92-4c5d-aa24-a0a993036280 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.838746] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527724da-2cc7-dc17-3d30-df607ffe5f85, 'name': SearchDatastore_Task, 'duration_secs': 0.023771} completed successfully. 
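Entries of the form "Task: {'id': ..., 'name': ..., 'duration_secs': ...} completed successfully", like the SearchDatastore_Task result just above, carry the duration of each vCenter operation, which is useful when hunting slow tasks in a run like this. A small, hypothetical helper (not part of Nova) that extracts them:

import re

# Matches the completed-task lines in the format shown above.
TASK_RE = re.compile(
    r"Task: \{'id': (?P<id>[^,]+), 'name': (?P<name>[^,]+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully")

def completed_tasks(lines):
    """Yield (task_id, task_name, duration_seconds) for each completed task."""
    for line in lines:
        for m in TASK_RE.finditer(line):
            yield m.group('id'), m.group('name'), float(m.group('secs'))

if __name__ == '__main__':
    import sys
    # Usage: python task_durations.py nova-compute.log
    with open(sys.argv[1]) as f:
        slowest = sorted(completed_tasks(f), key=lambda t: t[2], reverse=True)
    for task_id, name, secs in slowest[:10]:
        print('%-16s %-26s %.3fs' % (task_id, name, secs))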
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.840052] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d30b6452-54c3-4d88-81e1-253e2e68da69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.847654] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 954.847654] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205f528-90ac-5791-df85-18ffdbda3725" [ 954.847654] env[69982]: _type = "Task" [ 954.847654] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.866349] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205f528-90ac-5791-df85-18ffdbda3725, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.951699] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864922, 'name': ReconfigVM_Task, 'duration_secs': 0.962539} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.952616] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.952910] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-899f064b-9d5b-4fbb-879f-3cf612983723 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.965793] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 954.965793] env[69982]: value = "task-3864925" [ 954.965793] env[69982]: _type = "Task" [ 954.965793] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.982687] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864925, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.010831] env[69982]: DEBUG nova.network.neutron [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 955.223467] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864924, 'name': PowerOffVM_Task, 'duration_secs': 0.448585} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.223467] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 955.223688] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 955.224668] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc291fd6-8da0-46ad-ae1f-8c2793fe5bea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.238249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 955.238249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73febe8f-fdc5-4c79-b815-90384d53c5cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.257019] env[69982]: DEBUG nova.network.neutron [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.321600] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa722e04-deae-4056-8da3-7ecd9f0b10a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.330599] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6e8c9a-6fb0-48ea-a8eb-970e15709992 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.386252] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe9fcba-10f8-4d52-98e7-eaaf221eeaca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
955.396067] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205f528-90ac-5791-df85-18ffdbda3725, 'name': SearchDatastore_Task, 'duration_secs': 0.01751} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.399753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.399753] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 5b79fc38-ace3-4f94-8d1c-b77912f44a1d/5b79fc38-ace3-4f94-8d1c-b77912f44a1d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.399979] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f48899ef-37f1-4143-9caa-7b7818f4c292 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.404085] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd55dc6-b523-45e8-b5a1-db237c03dc8a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.421981] env[69982]: DEBUG nova.compute.provider_tree [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 955.425251] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 955.425251] env[69982]: value = "task-3864927" [ 955.425251] env[69982]: _type = "Task" [ 955.425251] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.435889] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864927, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.480120] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864925, 'name': Rename_Task, 'duration_secs': 0.181505} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.482023] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.482023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36c0d462-205b-49fd-8fb6-f5b8636717e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.489481] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 955.489481] env[69982]: value = "task-3864928" [ 955.489481] env[69982]: _type = "Task" [ 955.489481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.503904] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.641087] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 955.641087] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 955.641087] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 955.641087] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5e97b02-fd3f-4b48-9a4f-baf6b90a89b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.651033] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 955.651033] env[69982]: value = "task-3864929" [ 955.651033] env[69982]: _type = "Task" [ 955.651033] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.664774] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864929, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.759522] env[69982]: DEBUG oslo_concurrency.lockutils [req-977e0f40-cc6e-4e24-aa77-ab29f71cd5ac req-46188599-3df5-4606-bd7c-fe06c3e7967b service nova] Releasing lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.760056] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.760319] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 955.956921] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864927, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.967246] env[69982]: ERROR nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [req-2726657e-0404-4534-a8b1-95c2fc6f9e87] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2726657e-0404-4534-a8b1-95c2fc6f9e87"}]} [ 956.001066] env[69982]: DEBUG nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 956.009985] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864928, 'name': PowerOnVM_Task} progress is 66%. 
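The 409 "placement.concurrent_update" error above means another writer bumped the resource provider's generation between the report client's read and its PUT, so the client refreshes its view and retries, which is what the surrounding "Refreshing inventories" and "Updating ProviderTree inventory ... from _refresh_and_get_inventory" entries show. A hedged sketch of that read/PUT/retry pattern against the Placement API; the real logic lives in nova/scheduler/client/report.py on top of keystoneauth, while the endpoint, token and retry count below are placeholders and bare requests calls are used only for illustration:

import requests

PLACEMENT = 'http://placement.example.test/placement'   # hypothetical endpoint
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',                # placeholder token
           'OpenStack-API-Version': 'placement 1.26'}

def put_inventories(rp_uuid, inventories, attempts=3):
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT, rp_uuid)
    for _ in range(attempts):
        # Re-read the provider so the PUT carries its current generation.
        current = requests.get(url, headers=HEADERS).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            return resp
        # 409 placement.concurrent_update: the generation moved underneath us;
        # loop to refresh and try again, mirroring what the report client does.
    raise RuntimeError('inventory update kept conflicting for %s' % rp_uuid)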
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.047925] env[69982]: DEBUG nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 956.052025] env[69982]: DEBUG nova.compute.provider_tree [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 956.095600] env[69982]: DEBUG nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 956.141529] env[69982]: DEBUG nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 956.166031] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395733} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.166031] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 956.166031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 956.166031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 956.339299] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 956.450379] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58568} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.450786] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 5b79fc38-ace3-4f94-8d1c-b77912f44a1d/5b79fc38-ace3-4f94-8d1c-b77912f44a1d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.451308] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.454024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79fdd1a2-53c0-41d7-8143-82e05d3c2751 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.464504] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 956.464504] env[69982]: value = "task-3864930" [ 956.464504] env[69982]: _type = "Task" [ 956.464504] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.493519] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.509747] env[69982]: DEBUG oslo_vmware.api [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3864928, 'name': PowerOnVM_Task, 'duration_secs': 1.008585} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.511904] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.511904] env[69982]: INFO nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Took 8.73 seconds to spawn the instance on the hypervisor. [ 956.511904] env[69982]: DEBUG nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.511904] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27b930f-a66a-4ccf-866b-4c34a690d4dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.616791] env[69982]: DEBUG nova.network.neutron [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updating instance_info_cache with network_info: [{"id": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "address": "fa:16:3e:99:5b:16", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8e19350-a6", "ovs_interfaceid": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.846327] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d804f1-0b34-4964-ab3e-4a36d3783f7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.857930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4945598-f8ba-4804-ac84-ddc246af5399 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.897130] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f0e595-dd0a-46b2-8d38-01e4d5c9ceba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.903375] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddfd727-62c7-41e3-b089-fe590f543953 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.922791] env[69982]: DEBUG nova.compute.provider_tree [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 956.979283] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106885} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.979283] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 956.979825] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa6178b-c943-4bb9-a95d-2a3b075b0334 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.984310] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.987541] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.987541] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.987541] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.987541] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.005560] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] 5b79fc38-ace3-4f94-8d1c-b77912f44a1d/5b79fc38-ace3-4f94-8d1c-b77912f44a1d.vmdk or device None with type sparse 
{{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.006276] env[69982]: INFO nova.compute.manager [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Terminating instance [ 957.008642] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5c5a46f-82b1-41bb-9ff5-6b027a16ae3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.036025] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 957.036025] env[69982]: value = "task-3864931" [ 957.036025] env[69982]: _type = "Task" [ 957.036025] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.040698] env[69982]: INFO nova.compute.manager [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Took 32.47 seconds to build instance. [ 957.048704] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864931, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.120958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.121409] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance network_info: |[{"id": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "address": "fa:16:3e:99:5b:16", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8e19350-a6", "ovs_interfaceid": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.121904] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:5b:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8e19350-a6cb-4da2-a745-4a03db30a50b', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.130794] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating folder: Project (afaf89b0250d46048813da25c754e1a6). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 957.131476] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31e9d3e0-4b01-4c11-8eed-98b3c9014327 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.146852] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Created folder: Project (afaf89b0250d46048813da25c754e1a6) in parent group-v767796. [ 957.147371] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating folder: Instances. Parent ref: group-v768006. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 957.147977] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a4a5f981-d2db-4439-9882-4700a2032fb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.162650] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Created folder: Instances in parent group-v768006. [ 957.162951] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.163257] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.163571] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0cd59035-fe0d-4fcd-ac7a-672cce7b1000 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.191216] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.191216] env[69982]: value = "task-3864934" [ 957.191216] env[69982]: _type = "Task" [ 957.191216] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.202282] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864934, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.238784] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 957.239121] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 957.239257] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 957.239551] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 957.239741] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 957.239904] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 
tempest-ServersAdminTestJSON-63150947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 957.240407] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 957.240611] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 957.240791] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 957.240997] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 957.241141] env[69982]: DEBUG nova.virt.hardware [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 957.244875] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890ef265-e0de-4b7d-9fac-2214cfdbe891 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.256904] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd06666f-12a0-4d3f-9523-902f48cbf55d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.277812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:e1:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '51876cd6-d373-4edc-8595-254e5d631378', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0e9d6238-fb82-48aa-8702-091435aae1b1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.287851] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.289743] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.289743] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52b2aabd-b40c-4490-bdd2-71025b9e2749 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.316698] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.316698] env[69982]: value = "task-3864935" [ 957.316698] env[69982]: _type = "Task" [ 957.316698] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.326794] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864935, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.484605] env[69982]: DEBUG nova.scheduler.client.report [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 957.484605] env[69982]: DEBUG nova.compute.provider_tree [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 97 to 98 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 957.484605] env[69982]: DEBUG nova.compute.provider_tree [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 957.526816] env[69982]: DEBUG nova.compute.manager [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 957.527260] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.528456] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7bd75c-314c-421c-9a23-bb18f645338c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.554019] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3bd0f838-bd0f-453b-ad25-dc7a80e0aa6a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.950s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.554019] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.554019] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf9050f9-2db1-4035-9b01-a8cba51004eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.558470] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864931, 'name': ReconfigVM_Task, 'duration_secs': 0.401364} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.560698] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Reconfigured VM instance instance-0000004a to attach disk [datastore2] 5b79fc38-ace3-4f94-8d1c-b77912f44a1d/5b79fc38-ace3-4f94-8d1c-b77912f44a1d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.560698] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b102dc71-e574-4885-ae00-1d37f2a129e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.566037] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 957.566037] env[69982]: value = "task-3864936" [ 957.566037] env[69982]: _type = "Task" [ 957.566037] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.572285] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 957.572285] env[69982]: value = "task-3864937" [ 957.572285] env[69982]: _type = "Task" [ 957.572285] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.580031] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864936, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.586358] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864937, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.703443] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864934, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.828234] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864935, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.992598] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.485s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.992598] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 957.998791] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.158s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.998791] env[69982]: INFO nova.compute.claims [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.082332] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864936, 'name': PowerOffVM_Task, 'duration_secs': 0.275126} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.082387] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 958.082536] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 958.083302] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb24f404-bb58-44d6-bb58-16b5f3e10457 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.090028] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864937, 'name': Rename_Task, 'duration_secs': 0.207967} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.090187] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.090321] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d7cc778-1659-4f8a-97c8-980d6cb4aead {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.097874] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 958.097874] env[69982]: value = "task-3864939" [ 958.097874] env[69982]: _type = "Task" [ 958.097874] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.109679] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864939, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.163136] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 958.163469] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 958.163625] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleting the datastore file [datastore2] a9a14fa8-7f58-48f9-994d-b5063833a81b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 958.164297] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-078fbd66-71c6-4128-9c9c-b790d1b64645 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.173952] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 958.173952] env[69982]: value = "task-3864940" [ 958.173952] env[69982]: _type = "Task" [ 958.173952] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.183583] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864940, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.203000] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864934, 'name': CreateVM_Task, 'duration_secs': 0.898274} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.203256] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.203950] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.204408] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.204637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.204902] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-265cee55-3b2a-485b-87f5-a95dc2b17d35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.210689] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 958.210689] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52665ed2-f0d2-1c67-1374-90f56b19974f" [ 958.210689] env[69982]: _type = "Task" [ 958.210689] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.222035] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52665ed2-f0d2-1c67-1374-90f56b19974f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.327871] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864935, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.504126] env[69982]: DEBUG nova.compute.utils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 958.513099] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 958.513509] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.561305] env[69982]: DEBUG nova.policy [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b41a52cdf4c34d9c9b86c0debf922a36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251f58d95d51416d9d9fd54aa14546e2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.609030] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864939, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.693152] env[69982]: DEBUG oslo_vmware.api [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3864940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338896} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.693557] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.693782] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.693984] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.694168] env[69982]: INFO nova.compute.manager [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 958.694484] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.694733] env[69982]: DEBUG nova.compute.manager [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 958.694839] env[69982]: DEBUG nova.network.neutron [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 958.722309] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52665ed2-f0d2-1c67-1374-90f56b19974f, 'name': SearchDatastore_Task, 'duration_secs': 0.015004} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.722663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.722946] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.723152] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.723308] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.723545] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.723796] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3482bb7b-aa4c-460b-98c9-5ae22a0602d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.735402] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.735613] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.736467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f42f4f40-ab7d-4971-bebf-e08a0e31e796 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.743095] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 958.743095] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5275f17b-2f02-0a11-fe74-631aa8f71d94" [ 958.743095] env[69982]: _type = "Task" [ 958.743095] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.752866] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5275f17b-2f02-0a11-fe74-631aa8f71d94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.829952] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864935, 'name': CreateVM_Task, 'duration_secs': 1.493166} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.830734] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.831705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.832043] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.834021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.834021] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cffa0893-2905-4d2f-b53a-cd9254a144fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.838451] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 958.838451] env[69982]: value = 
"session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523bf4d2-5151-66cd-2b5f-a5d6afd872df" [ 958.838451] env[69982]: _type = "Task" [ 958.838451] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.853933] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523bf4d2-5151-66cd-2b5f-a5d6afd872df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.940157] env[69982]: DEBUG nova.compute.manager [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Received event network-changed-18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.940157] env[69982]: DEBUG nova.compute.manager [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Refreshing instance network info cache due to event network-changed-18a2c6c8-1313-42eb-a058-40e272e7fda3. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 958.940736] env[69982]: DEBUG oslo_concurrency.lockutils [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] Acquiring lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.940736] env[69982]: DEBUG oslo_concurrency.lockutils [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] Acquired lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.941089] env[69982]: DEBUG nova.network.neutron [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Refreshing network info cache for port 18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 959.013571] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 959.117021] env[69982]: DEBUG oslo_vmware.api [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864939, 'name': PowerOnVM_Task, 'duration_secs': 0.525718} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.118911] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.119348] env[69982]: INFO nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Took 8.62 seconds to spawn the instance on the hypervisor. [ 959.119729] env[69982]: DEBUG nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 959.120908] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Successfully created port: 1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.124228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e1ac8e-8743-44c1-9aaf-01477eccfd06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.256100] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5275f17b-2f02-0a11-fe74-631aa8f71d94, 'name': SearchDatastore_Task, 'duration_secs': 0.020604} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.260891] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81e29c43-e4df-4353-be41-5fee5b733038 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.267434] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 959.267434] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52434a6b-ca44-fa15-b588-457e43a07f64" [ 959.267434] env[69982]: _type = "Task" [ 959.267434] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.278630] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52434a6b-ca44-fa15-b588-457e43a07f64, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.355406] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523bf4d2-5151-66cd-2b5f-a5d6afd872df, 'name': SearchDatastore_Task, 'duration_secs': 0.026664} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.355812] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.356258] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.356572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.357574] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.357688] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.360889] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1eca0c22-e59d-46b2-a838-3ffabbdc8d36 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.373433] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.373706] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.374679] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8ca9b7e-e1ad-4473-ae29-2a579f280ead {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.382989] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 959.382989] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d17223-932b-b9ca-e27c-1454a21ba5ed" [ 959.382989] env[69982]: _type = "Task" [ 959.382989] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.397475] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d17223-932b-b9ca-e27c-1454a21ba5ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.651573] env[69982]: INFO nova.compute.manager [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Took 33.21 seconds to build instance. [ 959.671045] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e42e68-cb31-4b31-9f5e-4da0225db4a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.683803] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bac97d-ae96-408b-9a32-23599f627e99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.718228] env[69982]: DEBUG nova.network.neutron [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.723085] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06bce86-19f7-480f-bd2e-520f6dc49604 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.735190] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbfce8c-32cf-44a0-b726-1938cbb0705b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.755676] env[69982]: DEBUG nova.compute.provider_tree [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.782440] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: 
{'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52434a6b-ca44-fa15-b588-457e43a07f64, 'name': SearchDatastore_Task, 'duration_secs': 0.016207} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.782778] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.783062] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6d390a12-bfb4-4d91-9e83-a81560a08e1a/6d390a12-bfb4-4d91-9e83-a81560a08e1a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.783367] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4c55bb5b-156f-4fb1-834a-c8aee1e71a81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.791492] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 959.791492] env[69982]: value = "task-3864941" [ 959.791492] env[69982]: _type = "Task" [ 959.791492] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.801315] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864941, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.897817] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d17223-932b-b9ca-e27c-1454a21ba5ed, 'name': SearchDatastore_Task, 'duration_secs': 0.017646} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.898714] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73157400-f972-4eb6-9611-f096e27407e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.902371] env[69982]: DEBUG nova.network.neutron [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updated VIF entry in instance network info cache for port 18a2c6c8-1313-42eb-a058-40e272e7fda3. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 959.902722] env[69982]: DEBUG nova.network.neutron [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating instance_info_cache with network_info: [{"id": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "address": "fa:16:3e:38:fc:9e", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a2c6c8-13", "ovs_interfaceid": "18a2c6c8-1313-42eb-a058-40e272e7fda3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.911168] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 959.911168] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5288fd1a-de06-9589-f462-eef0b41e6399" [ 959.911168] env[69982]: _type = "Task" [ 959.911168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.921402] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5288fd1a-de06-9589-f462-eef0b41e6399, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.028027] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 960.077722] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 960.078037] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.078261] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 960.078460] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.078610] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 960.078757] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 960.078996] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 960.079192] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 960.079489] env[69982]: DEBUG nova.virt.hardware [None 
req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 960.079644] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 960.079989] env[69982]: DEBUG nova.virt.hardware [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 960.080702] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c837513d-7220-4fe4-ae2c-eb8b31bc255f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.090409] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae73c93-d409-49d1-9353-dfb077cfa563 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.156562] env[69982]: DEBUG oslo_concurrency.lockutils [None req-13caea87-7fe8-4adc-bf17-b0ceb2b8e95d tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.826s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.227271] env[69982]: INFO nova.compute.manager [-] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Took 1.53 seconds to deallocate network for instance. [ 960.257354] env[69982]: DEBUG nova.scheduler.client.report [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.306671] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864941, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.406325] env[69982]: DEBUG oslo_concurrency.lockutils [req-34f7c9de-17f1-4f29-85af-0af13914363c req-7774ffe8-d43d-485d-bdd4-41c8cf74d2d4 service nova] Releasing lock "refresh_cache-6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.421837] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5288fd1a-de06-9589-f462-eef0b41e6399, 'name': SearchDatastore_Task, 'duration_secs': 0.019324} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.422332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.422658] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.423008] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eedecca0-2364-455e-85a5-6ea2fa9c8b78 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.434302] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 960.434302] env[69982]: value = "task-3864942" [ 960.434302] env[69982]: _type = "Task" [ 960.434302] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.444288] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864942, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.642721] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.642721] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.738063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.765199] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.765722] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 960.768347] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.487s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.769938] env[69982]: INFO nova.compute.claims [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.806167] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.832988} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.806860] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6d390a12-bfb4-4d91-9e83-a81560a08e1a/6d390a12-bfb4-4d91-9e83-a81560a08e1a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.807176] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.807524] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5cad32db-a8e6-4963-a7de-fe7ce996f119 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.816213] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 960.816213] env[69982]: value = "task-3864943" [ 960.816213] env[69982]: _type = "Task" [ 960.816213] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.826561] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864943, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.949953] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864942, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.991858] env[69982]: DEBUG nova.compute.manager [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.992178] env[69982]: DEBUG oslo_concurrency.lockutils [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.992416] env[69982]: DEBUG oslo_concurrency.lockutils [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.992593] env[69982]: DEBUG oslo_concurrency.lockutils [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.992762] env[69982]: DEBUG nova.compute.manager [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] No waiting events found dispatching network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.993030] env[69982]: WARNING nova.compute.manager [req-4a7b5822-9e18-4ccc-9739-58edf652f9c9 req-f428231b-9d9c-4708-b9e5-2fc6465afac8 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received unexpected event network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 for instance with vm_state building and task_state spawning. [ 961.039295] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Successfully updated port: 1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.103523] env[69982]: DEBUG nova.compute.manager [req-65b225da-7f13-479d-b503-a5690817bfb3 req-6dc6c805-bd16-4cfa-984c-d781f79f0178 service nova] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Received event network-vif-deleted-ae227fd2-200e-4e2b-9de5-a6a54ec3ef4f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.145474] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 961.281927] env[69982]: DEBUG nova.compute.utils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 961.281927] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 961.281927] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.329479] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864943, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084905} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.329828] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.330678] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55389cb0-243d-4afa-810d-a6b3c5aa94eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.356827] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 6d390a12-bfb4-4d91-9e83-a81560a08e1a/6d390a12-bfb4-4d91-9e83-a81560a08e1a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.357190] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-962fa457-f90d-4178-bf13-0d683c601054 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.372824] env[69982]: DEBUG nova.policy [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 961.382093] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 961.382093] env[69982]: value = "task-3864944" [ 961.382093] env[69982]: _type = "Task" [ 961.382093] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.395561] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864944, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.444952] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.820076} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.445281] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.445595] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.445795] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81dcc389-1a98-4adf-9bf6-4489244f3e54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.453347] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 961.453347] env[69982]: value = "task-3864945" [ 961.453347] env[69982]: _type = "Task" [ 961.453347] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.464476] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864945, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.548480] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.548801] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.548864] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 961.700719] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.742345] env[69982]: DEBUG nova.compute.manager [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.743402] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ee9c4f-a956-4ded-ba37-30d174a1057d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.773501] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Successfully created port: 5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 961.785269] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 961.894088] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864944, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.963652] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864945, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179589} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.967010] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.968070] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ba7f52-d7e4-4184-8250-71b46092bb17 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.994080] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.998213] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa14f214-2868-4d15-8bc5-c4ebed255f20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.020492] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 962.020492] env[69982]: value = "task-3864946" [ 962.020492] env[69982]: _type = "Task" [ 962.020492] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.034406] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864946, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.096540] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 962.256953] env[69982]: INFO nova.compute.manager [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] instance snapshotting [ 962.260279] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80256f5d-d4da-47cc-a4c2-09107d27f97b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.288143] env[69982]: DEBUG nova.network.neutron [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.291171] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733358e7-365c-44d1-a665-f48f44e30212 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.395594] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864944, 'name': ReconfigVM_Task, 'duration_secs': 0.780437} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.396095] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 6d390a12-bfb4-4d91-9e83-a81560a08e1a/6d390a12-bfb4-4d91-9e83-a81560a08e1a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.396559] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74f0f363-dc75-441e-a5cf-f0ab1cade96b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.404310] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 962.404310] env[69982]: value = "task-3864947" [ 962.404310] env[69982]: _type = "Task" [ 962.404310] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.417524] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864947, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.443935] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2afd4c6-9f36-442b-8f37-1507152a473f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.453455] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b30b99-3042-4608-983b-05bf9daf8233 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.486167] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a84582-a5dc-4c55-b3eb-bd84d973b223 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.494410] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50a476f-c257-4397-bf5b-79925c5b183a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.508895] env[69982]: DEBUG nova.compute.provider_tree [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 962.531903] 
env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.794155] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.794637] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance network_info: |[{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 962.795057] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:93:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ded08ab-b715-4b57-81f3-69d6383c5a74', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 962.803441] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating folder: Project (251f58d95d51416d9d9fd54aa14546e2). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 962.804604] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 962.806754] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ee836f5-b91d-4e97-a46f-241314ff1304 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.809613] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 962.809995] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0ada22a6-e88a-42af-ac49-b41f241d8761 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.821344] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 962.821344] env[69982]: value = "task-3864949" [ 962.821344] env[69982]: _type = "Task" [ 962.821344] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.827086] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Created folder: Project (251f58d95d51416d9d9fd54aa14546e2) in parent group-v767796. [ 962.827325] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating folder: Instances. Parent ref: group-v768010. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 962.827822] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b39a796-06b8-407c-865a-b4c0506ce8ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.833771] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864949, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.838463] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Created folder: Instances in parent group-v768010. 
[ 962.838782] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 962.838991] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 962.839239] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72f5e3d2-b0f6-40bb-b3eb-df4158707f9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.863935] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 962.864206] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 962.864364] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 962.864541] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 962.864684] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 962.864827] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
962.865049] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 962.865214] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 962.865427] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 962.865535] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 962.865704] env[69982]: DEBUG nova.virt.hardware [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 962.866632] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4060656-47d7-4d29-a4c9-f2b60dba78ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.870679] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 962.870679] env[69982]: value = "task-3864951" [ 962.870679] env[69982]: _type = "Task" [ 962.870679] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.878297] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2bb99b-8311-4b56-94e8-ad92ad47a6fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.886392] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864951, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.914506] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864947, 'name': Rename_Task, 'duration_secs': 0.287341} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.914828] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.915106] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4235d14-7d5e-47b4-b944-2c678351f7ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.923058] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 962.923058] env[69982]: value = "task-3864952" [ 962.923058] env[69982]: _type = "Task" [ 962.923058] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.933889] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.029580] env[69982]: DEBUG nova.compute.manager [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 963.029844] env[69982]: DEBUG nova.compute.manager [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing instance network info cache due to event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 963.030014] env[69982]: DEBUG oslo_concurrency.lockutils [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.030197] env[69982]: DEBUG oslo_concurrency.lockutils [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.030359] env[69982]: DEBUG nova.network.neutron [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 963.039017] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864946, 'name': ReconfigVM_Task, 'duration_secs': 0.717588} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.040315] env[69982]: ERROR nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [req-bc194d01-48e5-42ae-b845-96f73c7b19ed] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bc194d01-48e5-42ae-b845-96f73c7b19ed"}]} [ 963.041633] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a/9123b08c-d2ec-4c4d-bade-0acdae75640a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 963.044621] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a220ed60-63a2-4541-9e3a-20b09c9c4222 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.053168] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 963.053168] env[69982]: value = "task-3864953" [ 963.053168] env[69982]: _type = "Task" [ 963.053168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.067196] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864953, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.068366] env[69982]: DEBUG nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 963.092256] env[69982]: DEBUG nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 963.092591] env[69982]: DEBUG nova.compute.provider_tree [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.111033] env[69982]: DEBUG nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 963.142779] env[69982]: DEBUG nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 963.336617] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864949, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.387104] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864951, 'name': CreateVM_Task, 'duration_secs': 0.37358} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.387433] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.388174] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.388391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.388715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 963.389089] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49c414cb-7b5b-4b0b-9b2e-76497aeb10eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.398666] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 
tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 963.398666] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237ac51-cb88-e3e7-7c1c-59e93f4df5af" [ 963.398666] env[69982]: _type = "Task" [ 963.398666] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.409039] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237ac51-cb88-e3e7-7c1c-59e93f4df5af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.435849] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864952, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.538039] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Successfully updated port: 5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 963.569765] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864953, 'name': Rename_Task, 'duration_secs': 0.218833} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.569765] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.569765] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b85de815-feb8-4b07-9374-c25420f93091 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.580282] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 963.580282] env[69982]: value = "task-3864954" [ 963.580282] env[69982]: _type = "Task" [ 963.580282] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.589757] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864954, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.722623] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c45e32-bd16-419b-b176-e12ebfd2c884 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.732599] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0798417b-28b6-4c89-b116-870c88e427eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.765473] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da99229-a14d-4158-ae75-99fa0ca9e16e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.775928] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fdb4ad-d977-408a-9839-375af04fe84e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.792907] env[69982]: DEBUG nova.compute.provider_tree [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 963.833681] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864949, 'name': CreateSnapshot_Task, 'duration_secs': 0.702148} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.836446] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 963.837617] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e65c0e-357a-4459-8a5a-040e3d2d68dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.875749] env[69982]: DEBUG nova.network.neutron [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updated VIF entry in instance network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 963.876200] env[69982]: DEBUG nova.network.neutron [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.909993] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5237ac51-cb88-e3e7-7c1c-59e93f4df5af, 'name': SearchDatastore_Task, 'duration_secs': 0.015855} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.910342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.910581] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 963.910816] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.910959] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.911151] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 963.911472] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2488aee0-8098-49d5-921a-54fa65f3f15f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.920823] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 963.921025] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 963.921866] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aaed2a1-1ad1-49ef-9a0b-4b5449de71cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.932628] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 963.932628] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523ec40f-4c5a-f83f-fc59-e5d115c85ff9" [ 963.932628] env[69982]: _type = "Task" [ 963.932628] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.935473] env[69982]: DEBUG oslo_vmware.api [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3864952, 'name': PowerOnVM_Task, 'duration_secs': 0.650632} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.938957] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.939204] env[69982]: INFO nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Took 10.67 seconds to spawn the instance on the hypervisor. [ 963.939509] env[69982]: DEBUG nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 963.940253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2748686-6499-420f-bf60-09fd256eac45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.955527] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523ec40f-4c5a-f83f-fc59-e5d115c85ff9, 'name': SearchDatastore_Task, 'duration_secs': 0.014568} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.956865] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82686210-3345-4436-9cd0-012dda83628d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.963672] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 963.963672] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e3257-b5bf-9f3c-0367-28ffa8881177" [ 963.963672] env[69982]: _type = "Task" [ 963.963672] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.973374] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e3257-b5bf-9f3c-0367-28ffa8881177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.039919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.039973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.040502] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 964.092205] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864954, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.339636] env[69982]: DEBUG nova.scheduler.client.report [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 964.339972] env[69982]: DEBUG nova.compute.provider_tree [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 99 to 100 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 964.340183] env[69982]: DEBUG nova.compute.provider_tree [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 964.366893] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 964.366893] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-65436b95-c432-413e-b3be-b4301a376103 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.374380] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 964.374380] env[69982]: value = "task-3864955" [ 964.374380] env[69982]: _type = "Task" [ 964.374380] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.378649] env[69982]: DEBUG oslo_concurrency.lockutils [req-7d536a71-40a4-40ac-b258-815a4b640865 req-bae43082-bf16-4493-9062-b8d349670782 service nova] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.386265] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864955, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.468786] env[69982]: INFO nova.compute.manager [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Took 32.89 seconds to build instance. [ 964.475886] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520e3257-b5bf-9f3c-0367-28ffa8881177, 'name': SearchDatastore_Task, 'duration_secs': 0.017762} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.476238] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.476550] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 964.476846] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-234a08aa-f6a8-4923-b7ef-4f32ac5a37ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.484779] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 964.484779] env[69982]: value = "task-3864956" [ 964.484779] env[69982]: _type = "Task" [ 964.484779] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.495270] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864956, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.581431] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 964.596371] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864954, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.851025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.080s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.851025] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 964.853896] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.292s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.854210] env[69982]: DEBUG nova.objects.instance [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lazy-loading 'resources' on Instance uuid 67613f71-a91e-4dae-8a6c-cd74c4821339 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.872765] env[69982]: DEBUG nova.network.neutron [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Updating instance_info_cache with network_info: [{"id": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "address": "fa:16:3e:da:b3:63", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": 
"nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45ab8d-a1", "ovs_interfaceid": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.887320] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864955, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.971739] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50807122-d453-4596-8ca0-14cc9fb4355b tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.790s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.996140] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864956, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.095522] env[69982]: DEBUG oslo_vmware.api [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864954, 'name': PowerOnVM_Task, 'duration_secs': 1.214582} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.095812] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.096035] env[69982]: DEBUG nova.compute.manager [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 965.096984] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225b1f94-5dbc-4425-a0b9-86b032313c18 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.143189] env[69982]: DEBUG nova.compute.manager [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Received event network-vif-plugged-5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.143189] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Acquiring lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.143189] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.144282] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.144657] env[69982]: DEBUG nova.compute.manager [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] No waiting events found dispatching network-vif-plugged-5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 965.145057] env[69982]: WARNING nova.compute.manager [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Received unexpected event network-vif-plugged-5e45ab8d-a115-4ed7-a813-da3d3003694f for instance with vm_state building and task_state spawning. 
[ 965.146527] env[69982]: DEBUG nova.compute.manager [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Received event network-changed-5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.146527] env[69982]: DEBUG nova.compute.manager [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Refreshing instance network info cache due to event network-changed-5e45ab8d-a115-4ed7-a813-da3d3003694f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 965.146527] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Acquiring lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.358179] env[69982]: DEBUG nova.compute.utils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.363057] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.363057] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.375892] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.378138] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Instance network_info: |[{"id": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "address": "fa:16:3e:da:b3:63", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", 
"external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45ab8d-a1", "ovs_interfaceid": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.378138] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Acquired lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.378353] env[69982]: DEBUG nova.network.neutron [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Refreshing network info cache for port 5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 965.378353] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:b3:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5e45ab8d-a115-4ed7-a813-da3d3003694f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.386658] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.391316] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.391884] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0eb56c87-a569-4480-81f3-b76a2b34e58e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.416278] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864955, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.417740] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.417740] env[69982]: value = "task-3864957" [ 965.417740] env[69982]: _type = "Task" [ 965.417740] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.428643] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864957, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.478371] env[69982]: DEBUG nova.policy [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.496553] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668103} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.496918] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.497175] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.497507] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39946f49-3291-4926-b6d1-1231fc3de72f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.506240] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 965.506240] env[69982]: value = "task-3864958" [ 965.506240] env[69982]: _type = "Task" [ 965.506240] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.517235] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864958, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.615067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.868404] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 965.894580] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864955, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.952280] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864957, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.976338] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c281a4-9937-4f1e-91fb-de4ce5f2f961 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.986882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9543d185-b796-4dff-aac6-f2fa7461c336 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.028536] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1818ff59-a38d-44ba-bf38-9df4297ad2b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.042537] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075715} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.043092] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b4a22f-8a42-4281-8310-d7815a7de1bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.049114] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.050147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84f2e72-b8aa-43d3-8a8b-40c28c0d28f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.068367] env[69982]: DEBUG nova.compute.provider_tree [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 966.089144] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.095534] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b448851e-5ea4-4505-a2dd-ca87f4ef790f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.117902] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 966.117902] env[69982]: value = "task-3864959" [ 966.117902] env[69982]: _type = "Task" [ 966.117902] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.129700] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864959, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.172120] env[69982]: DEBUG nova.scheduler.client.report [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 966.172478] env[69982]: DEBUG nova.compute.provider_tree [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 100 to 101 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 966.172757] env[69982]: DEBUG nova.compute.provider_tree [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 966.292730] env[69982]: DEBUG nova.compute.manager [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Received event network-changed-a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.293084] env[69982]: DEBUG nova.compute.manager [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Refreshing instance network info cache due to event network-changed-a8e19350-a6cb-4da2-a745-4a03db30a50b. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 966.293290] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] Acquiring lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.293323] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] Acquired lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.293464] env[69982]: DEBUG nova.network.neutron [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Refreshing network info cache for port a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 966.307405] env[69982]: DEBUG nova.network.neutron [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Updated VIF entry in instance network info cache for port 5e45ab8d-a115-4ed7-a813-da3d3003694f. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 966.308633] env[69982]: DEBUG nova.network.neutron [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Updating instance_info_cache with network_info: [{"id": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "address": "fa:16:3e:da:b3:63", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5e45ab8d-a1", "ovs_interfaceid": "5e45ab8d-a115-4ed7-a813-da3d3003694f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.392046] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864955, 'name': CloneVM_Task, 'duration_secs': 1.794496} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.392440] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Created linked-clone VM from snapshot [ 966.393174] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2769ec47-c2e9-46d7-9a4a-f6b6ba3c96b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.402773] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Uploading image 057a9ace-628f-4266-bea0-df2668c5b5da {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 966.421628] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 966.422175] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0f2e6f70-d390-4d2c-b4cf-1055d11bbfe1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.443208] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864957, 'name': CreateVM_Task, 'duration_secs': 0.590501} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.445914] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.446892] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 966.446892] env[69982]: value = "task-3864960" [ 966.446892] env[69982]: _type = "Task" [ 966.446892] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.448337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.451365] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.451744] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 966.452727] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-734eebad-f796-4590-99cf-6d14f19f1c49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.461463] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 966.462718] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d5738f-6df1-49c2-b309-c8e7de81fd62 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.476694] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 966.476694] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d23b8c-1323-e224-3d48-4aa3aaeda80c" [ 966.476694] env[69982]: _type = "Task" [ 966.476694] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.478018] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864960, 'name': Destroy_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.480082] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Successfully created port: 73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.482803] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 966.483125] env[69982]: ERROR oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk due to incomplete transfer. [ 966.488247] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-521c6048-ccf3-4c72-8ee2-ced1b4c1dda2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.496629] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d23b8c-1323-e224-3d48-4aa3aaeda80c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.507284] env[69982]: DEBUG oslo_vmware.rw_handles [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520b5e90-7e2d-1f1d-5a0e-02e2cff1dea3/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 966.507775] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploaded image e6d8946f-e6b4-47f7-a539-ec50ec4ec3aa to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 966.510529] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 966.511015] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1c847358-950f-4926-87ab-183517493571 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.521288] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 966.521288] env[69982]: value = "task-3864961" [ 966.521288] env[69982]: _type = "Task" [ 966.521288] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.532067] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864961, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.630213] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864959, 'name': ReconfigVM_Task, 'duration_secs': 0.371701} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.630668] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Reconfigured VM instance instance-0000004c to attach disk [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.631501] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-66b37af5-c0e7-4e93-8809-2763202d7b96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.640042] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 966.640042] env[69982]: value = "task-3864962" [ 966.640042] env[69982]: _type = "Task" [ 966.640042] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.652544] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864962, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.683025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.827s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.684166] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.791s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.686851] env[69982]: INFO nova.compute.claims [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 966.736220] env[69982]: INFO nova.scheduler.client.report [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Deleted allocations for instance 67613f71-a91e-4dae-8a6c-cd74c4821339 [ 966.813248] env[69982]: DEBUG oslo_concurrency.lockutils [req-a129b9bc-0466-456e-a633-333d38019ff5 req-1d3115e0-5d58-4148-b27b-3e5a61d918a2 service nova] Releasing lock "refresh_cache-5100234f-ea02-40bf-b883-fa9a159c7637" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.878462] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 966.915686] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 966.915941] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 966.916115] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 966.916319] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 966.916462] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 966.916607] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 966.916820] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 966.916978] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 966.917175] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] 
Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 966.917478] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 966.917478] env[69982]: DEBUG nova.virt.hardware [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 966.919014] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f75c905-66bd-4cf2-ba68-271cef29b94e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.932119] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121d6c09-6dec-44c5-8fcc-f8d50664eebe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.965100] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864960, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.993881] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d23b8c-1323-e224-3d48-4aa3aaeda80c, 'name': SearchDatastore_Task, 'duration_secs': 0.026602} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.994247] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.994485] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.994729] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.994873] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.995069] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 966.995373] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7323db0-92cb-403d-8c56-e844913cceef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.006203] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.006414] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 967.007224] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8490b2cf-a9c6-4fcd-ab2b-4631d98fca3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.015318] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 967.015318] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dd5e1b-99bf-8bbf-8bf6-6744922d2a1a" [ 967.015318] env[69982]: _type = "Task" [ 967.015318] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.031811] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dd5e1b-99bf-8bbf-8bf6-6744922d2a1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.036830] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864961, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.162081] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864962, 'name': Rename_Task, 'duration_secs': 0.166798} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.162385] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.162820] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a476b7d-6f51-4359-8a42-bcac9bf33f2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.175983] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 967.175983] env[69982]: value = "task-3864963" [ 967.175983] env[69982]: _type = "Task" [ 967.175983] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.184287] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864963, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.231067] env[69982]: DEBUG nova.network.neutron [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updated VIF entry in instance network info cache for port a8e19350-a6cb-4da2-a745-4a03db30a50b. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 967.231582] env[69982]: DEBUG nova.network.neutron [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updating instance_info_cache with network_info: [{"id": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "address": "fa:16:3e:99:5b:16", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8e19350-a6", "ovs_interfaceid": "a8e19350-a6cb-4da2-a745-4a03db30a50b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.245984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cda336e-2839-4a42-9c82-5f21a2c28b10 tempest-AttachInterfacesV270Test-1674264032 tempest-AttachInterfacesV270Test-1674264032-project-member] Lock "67613f71-a91e-4dae-8a6c-cd74c4821339" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.371s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.462667] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864960, 'name': Destroy_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.530656] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dd5e1b-99bf-8bbf-8bf6-6744922d2a1a, 'name': SearchDatastore_Task, 'duration_secs': 0.02168} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.532073] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bcc3c32-a901-4151-8e41-e9966cf3220c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.538629] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864961, 'name': Destroy_Task, 'duration_secs': 0.649084} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.539414] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroyed the VM [ 967.539694] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 967.539976] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e740396-d645-495c-9642-7aafeb4ec2ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.543237] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 967.543237] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e150f6-b958-348d-fad9-af86bce8f19a" [ 967.543237] env[69982]: _type = "Task" [ 967.543237] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.548381] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 967.548381] env[69982]: value = "task-3864964" [ 967.548381] env[69982]: _type = "Task" [ 967.548381] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.557209] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e150f6-b958-348d-fad9-af86bce8f19a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.565820] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864964, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.688055] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864963, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.734565] env[69982]: DEBUG oslo_concurrency.lockutils [req-b4daca6d-08fc-4e50-aab8-8bdb5bd0f255 req-06a9aa9d-a243-4601-8990-e1466e35f5c9 service nova] Releasing lock "refresh_cache-6d390a12-bfb4-4d91-9e83-a81560a08e1a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.778729] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.780029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.780029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.780029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.780029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.782407] env[69982]: INFO nova.compute.manager [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Terminating instance [ 967.966796] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': 
task-3864960, 'name': Destroy_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.059927] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e150f6-b958-348d-fad9-af86bce8f19a, 'name': SearchDatastore_Task, 'duration_secs': 0.020891} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.061076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.061366] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5100234f-ea02-40bf-b883-fa9a159c7637/5100234f-ea02-40bf-b883-fa9a159c7637.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 968.061648] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69a45f42-0156-4ec6-91d1-0fcf1dd4f49f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.067049] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864964, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.072397] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 968.072397] env[69982]: value = "task-3864965" [ 968.072397] env[69982]: _type = "Task" [ 968.072397] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.086092] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864965, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.189427] env[69982]: DEBUG oslo_vmware.api [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3864963, 'name': PowerOnVM_Task, 'duration_secs': 0.517381} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.189721] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.189947] env[69982]: INFO nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Took 8.16 seconds to spawn the instance on the hypervisor. [ 968.190119] env[69982]: DEBUG nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.191745] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f640f667-5c5d-421c-9240-1ba463ca5dff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.255229] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a86f66-4c41-496e-b5a6-1d98cc76016e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.268030] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec30c085-c892-402f-a665-fe27ba89f00f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.304694] env[69982]: DEBUG nova.compute.manager [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.305034] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.306204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9214f1-2d93-4a75-8bd9-76e8c7a74c37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.309882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c3d4c1-5a95-4b9f-ae3e-bf8beb35708b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.321624] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7da9e00-ba81-4f5b-af71-53cbe74b5eac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.325864] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.326170] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62054557-2b4c-4f1b-bc2e-50297479e86b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.339761] env[69982]: DEBUG nova.compute.provider_tree [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.343464] env[69982]: DEBUG oslo_vmware.api [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 968.343464] env[69982]: value = "task-3864966" [ 968.343464] env[69982]: _type = "Task" [ 968.343464] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.344569] env[69982]: DEBUG nova.compute.manager [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Received event network-vif-plugged-73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.344808] env[69982]: DEBUG oslo_concurrency.lockutils [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] Acquiring lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.345140] env[69982]: DEBUG oslo_concurrency.lockutils [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.345345] env[69982]: DEBUG oslo_concurrency.lockutils [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.345521] env[69982]: DEBUG nova.compute.manager [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] No waiting events found dispatching network-vif-plugged-73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 968.345750] env[69982]: WARNING nova.compute.manager [req-f5065621-1a84-48f9-aab3-b3ef496105b2 req-ccb3b875-3747-4068-8eb3-58e4f13f7c1a service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Received unexpected event network-vif-plugged-73bae6c7-ad78-4ee7-921b-f3fa33dbae08 for instance with vm_state building and task_state spawning. [ 968.357733] env[69982]: DEBUG oslo_vmware.api [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864966, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.468437] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864960, 'name': Destroy_Task, 'duration_secs': 1.623539} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.468747] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Destroyed the VM [ 968.468955] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 968.469252] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0180631c-7ea2-49da-b3f3-c9f1623797ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.478657] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 968.478657] env[69982]: value = "task-3864967" [ 968.478657] env[69982]: _type = "Task" [ 968.478657] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.491530] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.564844] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864964, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.565625] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Successfully updated port: 73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 968.585120] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864965, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.715960] env[69982]: INFO nova.compute.manager [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Took 33.41 seconds to build instance. 
[ 968.850366] env[69982]: DEBUG nova.scheduler.client.report [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.861937] env[69982]: DEBUG oslo_vmware.api [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864966, 'name': PowerOffVM_Task, 'duration_secs': 0.300806} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.862568] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 968.862915] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.863330] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cf82f68-111e-4716-8848-177456fd70ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.955174] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.955174] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.955174] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore1] 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.955174] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b095ca90-475b-4c9f-ae76-9d52b5c35521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.966318] env[69982]: DEBUG oslo_vmware.api [None 
req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 968.966318] env[69982]: value = "task-3864969" [ 968.966318] env[69982]: _type = "Task" [ 968.966318] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.982137] env[69982]: DEBUG oslo_vmware.api [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864969, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.992379] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.060900] env[69982]: DEBUG oslo_vmware.api [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864964, 'name': RemoveSnapshot_Task, 'duration_secs': 1.417761} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.061530] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 969.061603] env[69982]: INFO nova.compute.manager [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 20.18 seconds to snapshot the instance on the hypervisor. 
[ 969.068136] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.068273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.068424] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 969.085659] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864965, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760901} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.085928] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5100234f-ea02-40bf-b883-fa9a159c7637/5100234f-ea02-40bf-b883-fa9a159c7637.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 969.086173] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 969.086433] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b96dea3-619a-4d24-95a6-80940283d3eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.095573] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 969.095573] env[69982]: value = "task-3864970" [ 969.095573] env[69982]: _type = "Task" [ 969.095573] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.107617] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864970, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.218350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-65bb329e-7ae1-412c-9371-38f4a760b477 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.231s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.356505] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.357303] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 969.361528] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.004s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.361980] env[69982]: DEBUG nova.objects.instance [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lazy-loading 'resources' on Instance uuid 570675a8-3ec0-4fe6-b123-d3901d56b8cf {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.484340] env[69982]: DEBUG oslo_vmware.api [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3864969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.428812} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.489895] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.490386] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.490751] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.491283] env[69982]: INFO nova.compute.manager [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Took 1.19 seconds to destroy the instance on the hypervisor. [ 969.491664] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.492023] env[69982]: DEBUG nova.compute.manager [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.492284] env[69982]: DEBUG nova.network.neutron [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 969.502821] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.603662] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 969.611873] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864970, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149799} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.612179] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.613203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e92b0e-11f2-419b-9b86-21297cbd021d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.652190] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 5100234f-ea02-40bf-b883-fa9a159c7637/5100234f-ea02-40bf-b883-fa9a159c7637.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.666397] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58bd8d36-fd91-4969-a612-77de95835218 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.700550] env[69982]: DEBUG nova.compute.manager [None req-f8f4ffe3-768a-49ee-9a96-107317976298 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Found 1 images (rotation: 2) {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 969.704980] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 969.704980] env[69982]: value = "task-3864971" [ 969.704980] env[69982]: _type = "Task" [ 969.704980] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.712448] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864971, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.865820] env[69982]: DEBUG nova.compute.utils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 969.867321] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 969.867806] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 969.904358] env[69982]: DEBUG nova.network.neutron [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Updating instance_info_cache with network_info: [{"id": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "address": "fa:16:3e:24:e6:f4", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73bae6c7-ad", "ovs_interfaceid": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.973884] env[69982]: DEBUG nova.policy [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08d17e3ac5f40c6890dc8dcc4c559d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efe64e69253d49a6a1146f240506ce39', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 970.003945] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.218674] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864971, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.386184] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 970.411637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.412512] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Instance network_info: |[{"id": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "address": "fa:16:3e:24:e6:f4", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73bae6c7-ad", "ovs_interfaceid": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 970.412691] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:e6:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73bae6c7-ad78-4ee7-921b-f3fa33dbae08', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 970.421617] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 970.421905] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 970.422793] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f2b2ada-02a1-43d8-94c7-477174b38f38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.451520] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 970.451520] env[69982]: value = "task-3864972" [ 970.451520] env[69982]: _type = "Task" [ 970.451520] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.464061] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864972, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.467866] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Successfully created port: 73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 970.499169] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.510028] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c299c0cb-36ab-471e-ad15-afa41589be58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.524656] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72b5af8-37b0-4a5a-a6db-e1f2a53d366d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.556333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd3c40e-c0b7-4084-86a2-7a5f79b0b507 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.565532] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5726f7d8-e50d-4e75-80fd-1b3e36e99698 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.582634] env[69982]: DEBUG nova.compute.provider_tree [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.608170] env[69982]: DEBUG nova.network.neutron [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.715031] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864971, 'name': ReconfigVM_Task, 'duration_secs': 0.638306} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.715361] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 5100234f-ea02-40bf-b883-fa9a159c7637/5100234f-ea02-40bf-b883-fa9a159c7637.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.716017] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45924e6b-8806-4663-8328-84e0a3bf16ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.724840] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 970.724840] env[69982]: value = "task-3864973" [ 970.724840] env[69982]: _type = "Task" [ 970.724840] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.734548] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864973, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.924373] env[69982]: DEBUG nova.compute.manager [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Received event network-changed-73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.924373] env[69982]: DEBUG nova.compute.manager [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Refreshing instance network info cache due to event network-changed-73bae6c7-ad78-4ee7-921b-f3fa33dbae08. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 970.924373] env[69982]: DEBUG oslo_concurrency.lockutils [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] Acquiring lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.924373] env[69982]: DEBUG oslo_concurrency.lockutils [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] Acquired lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.924373] env[69982]: DEBUG nova.network.neutron [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Refreshing network info cache for port 73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 970.967032] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864972, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.998630] env[69982]: DEBUG oslo_vmware.api [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864967, 'name': RemoveSnapshot_Task, 'duration_secs': 2.046792} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.998944] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 971.086363] env[69982]: DEBUG nova.scheduler.client.report [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.113681] env[69982]: INFO nova.compute.manager [-] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Took 1.62 seconds to deallocate network for instance. [ 971.180632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "475a403c-bfdb-4239-b0d4-3baca441603f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.180792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.238506] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864973, 'name': Rename_Task, 'duration_secs': 0.169359} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.238824] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 971.239207] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7125132d-945a-4719-a35e-4affb65ddf2e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.250783] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 971.250783] env[69982]: value = "task-3864974" [ 971.250783] env[69982]: _type = "Task" [ 971.250783] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.261584] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864974, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.376688] env[69982]: DEBUG nova.compute.manager [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.377807] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb9fc74-a25f-40a6-8080-b74e309de3dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.405939] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 971.463131] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864972, 'name': CreateVM_Task, 'duration_secs': 0.536554} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.466022] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 971.468425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.468425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.468425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 971.468425] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cd73fb4-8753-4032-a3f1-24d16788f518 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.475889] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 971.476176] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 971.476337] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 971.476519] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c 
tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 971.476662] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 971.477501] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 971.477501] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 971.477501] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 971.477501] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 971.477501] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 971.477824] env[69982]: DEBUG nova.virt.hardware [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 971.479026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218fac41-0c8a-4dc4-88d0-562847b1b018 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.485690] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 971.485690] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526952c2-af5e-18c8-3114-5b7b1b37e63a" [ 971.485690] env[69982]: _type = "Task" [ 971.485690] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.496214] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3c6fb7-bf23-44c9-bb14-85fdcab0332c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.508375] env[69982]: WARNING nova.compute.manager [None req-040d36c9-0f71-4f1d-b89f-99ea87b46f5e tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Image not found during snapshot: nova.exception.ImageNotFound: Image 057a9ace-628f-4266-bea0-df2668c5b5da could not be found. [ 971.509662] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526952c2-af5e-18c8-3114-5b7b1b37e63a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.591960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.230s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.597217] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.927s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.597691] env[69982]: DEBUG nova.objects.instance [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lazy-loading 'resources' on Instance uuid 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.622563] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.648515] env[69982]: INFO nova.scheduler.client.report [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleted allocations for instance 570675a8-3ec0-4fe6-b123-d3901d56b8cf [ 971.684436] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 971.765146] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864974, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.889749] env[69982]: INFO nova.compute.manager [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] instance snapshotting [ 971.890424] env[69982]: DEBUG nova.objects.instance [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 971.952761] env[69982]: DEBUG nova.network.neutron [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Updated VIF entry in instance network info cache for port 73bae6c7-ad78-4ee7-921b-f3fa33dbae08. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 971.953232] env[69982]: DEBUG nova.network.neutron [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Updating instance_info_cache with network_info: [{"id": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "address": "fa:16:3e:24:e6:f4", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73bae6c7-ad", "ovs_interfaceid": "73bae6c7-ad78-4ee7-921b-f3fa33dbae08", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.003938] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526952c2-af5e-18c8-3114-5b7b1b37e63a, 'name': SearchDatastore_Task, 'duration_secs': 0.021903} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.004861] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.005206] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 972.005491] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.005885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.005885] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 972.006107] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-108ab2e9-b628-4b30-aa6a-a60d67396a86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.016759] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 972.017281] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 972.017783] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20557e9a-d7b3-4fb6-85a5-0808fd6d500a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.024560] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 972.024560] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52060cc2-e25d-6851-a058-d1a3a5048c22" [ 972.024560] env[69982]: _type = "Task" [ 972.024560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.033570] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52060cc2-e25d-6851-a058-d1a3a5048c22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.091935] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.092321] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.092625] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.092922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.093167] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.095718] env[69982]: INFO nova.compute.manager [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Terminating instance [ 972.164937] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8513520-02f6-427c-9567-e864124fdb41 tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "570675a8-3ec0-4fe6-b123-d3901d56b8cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.312s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.208099] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.263354] env[69982]: DEBUG oslo_vmware.api [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864974, 'name': PowerOnVM_Task, 'duration_secs': 0.557454} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.263354] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.263354] env[69982]: INFO nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Took 9.46 seconds to spawn the instance on the hypervisor. 
[ 972.263497] env[69982]: DEBUG nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.264610] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b9fb19-de03-4064-a1ed-8d2a6a210eb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.351057] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Successfully updated port: 73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 972.403504] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c9e9f2-e7ea-4f2a-9391-e34eae3b804e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.428810] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05555339-bd67-4a42-9f79-ad1ac4d011d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.457237] env[69982]: DEBUG oslo_concurrency.lockutils [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] Releasing lock "refresh_cache-56ad2f3f-c24f-446e-8df7-09fde60ba6cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.457517] env[69982]: DEBUG nova.compute.manager [req-10c525af-a700-41dd-b976-28b7d7d6233b req-8f82f391-da5d-47c5-a207-641609ca1af7 service nova] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Received event network-vif-deleted-c2bd72f6-ab11-4b22-aa80-ce6071d3ffd4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.536831] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52060cc2-e25d-6851-a058-d1a3a5048c22, 'name': SearchDatastore_Task, 'duration_secs': 0.02092} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.540485] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edd36e6b-7ad6-47c3-a159-9439bddf4da6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.547221] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 972.547221] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527cb337-4c2b-de63-dd16-45800b7707e2" [ 972.547221] env[69982]: _type = "Task" [ 972.547221] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.559947] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527cb337-4c2b-de63-dd16-45800b7707e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.599817] env[69982]: DEBUG nova.compute.manager [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 972.600071] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.600968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bb1937-bbdb-4c08-9e02-f56fbb0eb7f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.613470] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.613754] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-554f6a00-2f0c-4bb6-9457-f6276e68bf7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.621460] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 972.621460] env[69982]: value = "task-3864975" [ 972.621460] env[69982]: _type = "Task" [ 972.621460] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.640012] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864975, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.659718] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5108451-372a-4441-967b-ca9b2f118a4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.669643] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1768bde-b557-4e41-8e43-676f581fe712 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.713023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eeadb3-66d0-4ec3-8128-d74ea89c1d0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.721127] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143402da-7c96-4c83-b342-cf42ee288e10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.727095] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.727095] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.727384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.727516] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.727674] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.730161] env[69982]: INFO nova.compute.manager [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Terminating instance [ 972.742064] env[69982]: DEBUG nova.compute.provider_tree [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.787380] env[69982]: INFO nova.compute.manager [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Took 36.98 seconds to build instance. [ 972.854061] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 972.854314] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 972.854512] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 972.942299] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 972.942701] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e3a43c11-838d-4263-aa62-26495ac73b93 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.953792] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 972.953792] env[69982]: value = "task-3864976" [ 972.953792] env[69982]: _type = "Task" [ 972.953792] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.966803] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864976, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.038843] env[69982]: DEBUG nova.compute.manager [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Received event network-vif-plugged-73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.039076] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Acquiring lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.039378] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.039954] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.039954] env[69982]: DEBUG nova.compute.manager [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] No waiting events found dispatching network-vif-plugged-73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 973.040237] env[69982]: WARNING nova.compute.manager [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Received unexpected event network-vif-plugged-73909075-891e-4fc4-a912-c3757fcda156 for instance with vm_state building and task_state spawning. [ 973.040364] env[69982]: DEBUG nova.compute.manager [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Received event network-changed-73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.041703] env[69982]: DEBUG nova.compute.manager [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Refreshing instance network info cache due to event network-changed-73909075-891e-4fc4-a912-c3757fcda156.
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 973.041703] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Acquiring lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.061456] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527cb337-4c2b-de63-dd16-45800b7707e2, 'name': SearchDatastore_Task, 'duration_secs': 0.039922} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.061828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.062114] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 56ad2f3f-c24f-446e-8df7-09fde60ba6cd/56ad2f3f-c24f-446e-8df7-09fde60ba6cd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 973.062429] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50622706-6c63-434d-9ff2-30ecc90fede1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.072313] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 973.072313] env[69982]: value = "task-3864977" [ 973.072313] env[69982]: _type = "Task" [ 973.072313] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.081327] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.136380] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864975, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.180864] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.181284] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.181568] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.181771] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.181948] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.186219] env[69982]: INFO nova.compute.manager [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Terminating instance [ 973.244618] env[69982]: DEBUG nova.compute.manager [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Start destroying the instance on the hypervisor.
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.245340] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.246450] env[69982]: DEBUG nova.scheduler.client.report [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.253180] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e332bb-d2f5-4532-969d-7aa604093981 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.265800] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.266116] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-370accd0-3415-43e8-9bd7-54b0dfceb570 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.276604] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 973.276604] env[69982]: value = "task-3864978" [ 973.276604] env[69982]: _type = "Task" [ 973.276604] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.288401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-33be8a58-aa2b-4992-8655-2ff83dbe16b4 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.115s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.289318] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864978, 'name': PowerOffVM_Task} progress is 0%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.398076] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 973.406658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.411054] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.002s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.468402] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864976, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.575864] env[69982]: DEBUG nova.network.neutron [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updating instance_info_cache with network_info: [{"id": "73909075-891e-4fc4-a912-c3757fcda156", "address": "fa:16:3e:52:ee:b1", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73909075-89", "ovs_interfaceid": "73909075-891e-4fc4-a912-c3757fcda156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.583923] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864977, 'name':
CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.634572] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864975, 'name': PowerOffVM_Task, 'duration_secs': 0.565494} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.634572] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.634818] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.634890] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffff0ecd-3119-4e9e-96bf-b114f9f4ecc1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.693045] env[69982]: DEBUG nova.compute.manager [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.693277] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.694249] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961213cc-92b4-434e-96be-7b6aecb3cf3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.704503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.704795] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74cdb907-31e2-4d0a-8274-1575d1754eae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.712121] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 973.712121] env[69982]: value = "task-3864980" [ 973.712121] env[69982]: _type = "Task" [ 973.712121] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.722076] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864980, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.725962] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.726282] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.726496] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleting the datastore file [datastore2] 5b79fc38-ace3-4f94-8d1c-b77912f44a1d {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.726800] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed0c1528-ed6d-4b7b-8c5c-8136fa0f6c95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.733937] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 973.733937] env[69982]: value = "task-3864981" [ 973.733937] env[69982]: _type = "Task" [ 973.733937] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.738755] env[69982]: DEBUG oslo_concurrency.lockutils [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5100234f-ea02-40bf-b883-fa9a159c7637" by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.739039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" acquired by "nova.compute.manager.ComputeManager.stop_instance.<locals>.do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.739290] env[69982]: DEBUG nova.compute.manager [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 973.740074] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5c7a83-c57b-4ab8-b5ad-9c34e91b2b39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.750678] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864981, 'name': DeleteDatastoreFile_Task} progress is 0%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.753055] env[69982]: DEBUG nova.compute.manager [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 973.753521] env[69982]: DEBUG nova.objects.instance [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'flavor' on Instance uuid 5100234f-ea02-40bf-b883-fa9a159c7637 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.760924] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.165s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.766053] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.507s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.768449] env[69982]: INFO nova.compute.claims [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 973.788899] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864978, 'name': PowerOffVM_Task, 'duration_secs': 0.392784} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.789208] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.789378] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.789653] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c2cc713-b479-4cf5-9911-59eed46cce0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.804684] env[69982]: INFO nova.scheduler.client.report [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Deleted allocations for instance 35fdf25e-c8c0-4123-a95a-2a4c1a504f99 [ 973.869913] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.869913] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.870105] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleting the datastore file [datastore1] ff2c680a-211a-44ad-b00d-1037f1fcb856 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.870515] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-533a0834-0a38-4fe9-9f87-560dbafbc03e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.878339] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for the task: (returnval){ [ 973.878339] env[69982]: value = "task-3864983" [ 973.878339] env[69982]: _type = "Task" [ 973.878339] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.888216] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864983, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.916877] env[69982]: DEBUG nova.compute.utils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 973.970957] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864976, 'name': CreateSnapshot_Task, 'duration_secs': 1.00969} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.971496] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 973.972804] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5221c6f-52d8-4df1-8d37-a5b54bb44c58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.087252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 974.087252] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance network_info: |[{"id": "73909075-891e-4fc4-a912-c3757fcda156", "address": "fa:16:3e:52:ee:b1", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73909075-89", "ovs_interfaceid": "73909075-891e-4fc4-a912-c3757fcda156", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 974.087632] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864977, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.567109} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.087632] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Acquired lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.087632] env[69982]: DEBUG nova.network.neutron [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Refreshing network info cache for port 73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 974.087632] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:ee:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73909075-891e-4fc4-a912-c3757fcda156', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 974.094261] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.094509] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 56ad2f3f-c24f-446e-8df7-09fde60ba6cd/56ad2f3f-c24f-446e-8df7-09fde60ba6cd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.094765] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.096082] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 974.096328] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fc34172-d65c-40d7-9bf5-5ecf062f731c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.104151] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bca5454d-48e0-4b12-9040-502d2f4e5fd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.125611] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 974.125611] env[69982]: value = "task-3864984" [ 974.125611] env[69982]: _type = "Task" [ 974.125611] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.125611] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 974.125611] env[69982]: value = "task-3864985" [ 974.125611] env[69982]: _type = "Task" [ 974.125611] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.140506] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864985, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.144512] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864984, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.228129] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864980, 'name': PowerOffVM_Task, 'duration_secs': 0.46687} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.228580] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.228874] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 974.229277] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be9105b2-cff1-44f7-9546-1772bbc6c158 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.246298] env[69982]: DEBUG oslo_vmware.api [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3864981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.450677} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.247066] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.247404] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.247699] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.247979] env[69982]: INFO nova.compute.manager [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Took 1.65 seconds to destroy the instance on the hypervisor. [ 974.248379] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.248660] env[69982]: DEBUG nova.compute.manager [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.248841] env[69982]: DEBUG nova.network.neutron [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 974.309660] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 974.309889] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 974.310088] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleting the datastore file [datastore2] 43a8e7dd-843b-49f6-9edb-60c2b380e9c2 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 974.312612] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-510fe5a9-9d9b-4871-9ffc-96e766ee5698 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.315695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4cedab56-fe50-4bf7-a4a9-2b336c5c9059 tempest-MigrationsAdminTest-1206652408 tempest-MigrationsAdminTest-1206652408-project-member] Lock "35fdf25e-c8c0-4123-a95a-2a4c1a504f99" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.669s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.321469] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for the task: (returnval){ [ 974.321469] env[69982]: value = "task-3864987" [ 974.321469] env[69982]: _type = "Task" [ 974.321469] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.331668] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864987, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.390478] env[69982]: DEBUG oslo_vmware.api [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Task: {'id': task-3864983, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396884} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.390478] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.390478] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.390478] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.390783] env[69982]: INFO nova.compute.manager [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Took 1.15 seconds to destroy the instance on the hypervisor. [ 974.390855] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.391089] env[69982]: DEBUG nova.compute.manager [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.391195] env[69982]: DEBUG nova.network.neutron [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 974.420388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.508327] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 974.508327] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c6d43f3f-f6ca-49c8-80e8-9bcade3fcaf3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.513100] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 974.513100] env[69982]: value = "task-3864988" [ 974.513100] env[69982]: _type = "Task" [ 974.513100] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.524464] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864988, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.640024] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864985, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.643807] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864984, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090277} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.644519] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.645228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfab4f5-49e6-4c37-bcc9-5f9227bcaf32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.677022] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 56ad2f3f-c24f-446e-8df7-09fde60ba6cd/56ad2f3f-c24f-446e-8df7-09fde60ba6cd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.677331] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28b8c401-0fee-473d-ae06-c5e62362454f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.701477] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 974.701477] env[69982]: value = "task-3864989" [ 974.701477] env[69982]: _type = "Task" [ 974.701477] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.712667] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864989, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.763152] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.763523] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14343ed7-1075-4999-8b61-7554f7f5a775 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.773044] env[69982]: DEBUG oslo_vmware.api [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 974.773044] env[69982]: value = "task-3864990" [ 974.773044] env[69982]: _type = "Task" [ 974.773044] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.787499] env[69982]: DEBUG oslo_vmware.api [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864990, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.839779] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864987, 'name': DeleteDatastoreFile_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.031240] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864988, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.065053] env[69982]: DEBUG nova.network.neutron [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updated VIF entry in instance network info cache for port 73909075-891e-4fc4-a912-c3757fcda156. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 975.065355] env[69982]: DEBUG nova.network.neutron [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updating instance_info_cache with network_info: [{"id": "73909075-891e-4fc4-a912-c3757fcda156", "address": "fa:16:3e:52:ee:b1", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73909075-89", "ovs_interfaceid": "73909075-891e-4fc4-a912-c3757fcda156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.146507] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3864985, 'name': CreateVM_Task, 'duration_secs': 0.92776} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.146507] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 975.146746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.146919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.147297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 975.147572] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e53502cd-bac0-42e8-9e5c-4a70486f40f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.154615] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 975.154615] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdf00d-4cde-5f07-fc62-159789eb897a" [ 975.154615] env[69982]: _type = "Task" [ 975.154615] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.172012] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdf00d-4cde-5f07-fc62-159789eb897a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.218628] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864989, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.282892] env[69982]: DEBUG oslo_vmware.api [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3864990, 'name': PowerOffVM_Task, 'duration_secs': 0.267497} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.285515] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 975.285720] env[69982]: DEBUG nova.compute.manager [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 975.287947] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2b3ed8-af24-4074-9ec1-90d61509846a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.342642] env[69982]: DEBUG oslo_vmware.api [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Task: {'id': task-3864987, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.512883} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.342642] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 975.342642] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 975.342642] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 975.342811] env[69982]: INFO nova.compute.manager [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 1.65 seconds to destroy the instance on the hypervisor. [ 975.343446] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 975.346088] env[69982]: DEBUG nova.compute.manager [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 975.346209] env[69982]: DEBUG nova.network.neutron [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 975.417119] env[69982]: DEBUG nova.compute.manager [req-b86375a9-0942-4cf9-bcb2-5e75a8128005 req-b927bc20-6333-4356-9b66-64d44e1e8f5d service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Received event network-vif-deleted-46457931-d8c0-40b0-864b-9a4b8489c524 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.417434] env[69982]: INFO nova.compute.manager [req-b86375a9-0942-4cf9-bcb2-5e75a8128005 req-b927bc20-6333-4356-9b66-64d44e1e8f5d service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Neutron deleted interface 46457931-d8c0-40b0-864b-9a4b8489c524; detaching it from the instance and deleting it from the info cache [ 975.417537] env[69982]: DEBUG nova.network.neutron [req-b86375a9-0942-4cf9-bcb2-5e75a8128005 req-b927bc20-6333-4356-9b66-64d44e1e8f5d service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.428990] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85762008-7911-475e-ad04-f6d950c75b9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.439515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94802b0-7d80-4009-b78c-5272896e4674 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.473447] env[69982]: DEBUG nova.network.neutron [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.477856] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b22a15-22b9-47e7-8640-98c77037b66a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.484901] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb21e96-acc1-4d03-b83e-f1cfec680b61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.501222] env[69982]: DEBUG nova.compute.provider_tree [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 975.525838] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864988, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.572437] env[69982]: DEBUG oslo_concurrency.lockutils [req-0acec971-2952-43d6-bd85-675af2c64abe req-7dc19c5b-ce35-4468-84be-8f3566419d9d service nova] Releasing lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.603459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.603807] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.604087] env[69982]: INFO nova.compute.manager [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Attaching volume a121791e-c716-401f-b84f-37fb82a9d242 to /dev/sdb [ 975.661241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566c14eb-6a97-4f79-8104-1aeed7e60976 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.670992] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fdf00d-4cde-5f07-fc62-159789eb897a, 'name': SearchDatastore_Task, 'duration_secs': 0.024127} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.672931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 975.673204] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 975.673443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.673593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.673803] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 975.674168] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43357370-c472-4465-838a-d1d4abc9e69c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.676533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d19e50-3644-4b17-917e-fd9a093b60a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.696100] env[69982]: DEBUG nova.virt.block_device [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating existing volume attachment record: cbb505d4-40aa-42fe-8b62-8ce5b0be1a7a {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 975.699705] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 975.700112] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db01119b-a040-48a3-9296-09f785b9c51c 
tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 975.701064] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-820aea14-5e1a-40ef-ae32-d7e192c8c211 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.720735] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 975.720735] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c05ba-f5d4-612c-5912-afca40a690b3" [ 975.720735] env[69982]: _type = "Task" [ 975.720735] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.720735] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864989, 'name': ReconfigVM_Task, 'duration_secs': 0.558054} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.720735] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 56ad2f3f-c24f-446e-8df7-09fde60ba6cd/56ad2f3f-c24f-446e-8df7-09fde60ba6cd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.725510] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2acd1c1-d951-447f-aea1-14c376ec5f99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.733206] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c05ba-f5d4-612c-5912-afca40a690b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.736170] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 975.736170] env[69982]: value = "task-3864991" [ 975.736170] env[69982]: _type = "Task" [ 975.736170] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.745735] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864991, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.767203] env[69982]: DEBUG nova.compute.manager [req-ed0f347f-78c6-4439-988d-e5347bbb27aa req-ea5862b9-4c18-4696-91de-c8a2e5879bd0 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Received event network-vif-deleted-0f022a40-3503-463d-a390-efa4ea123ef8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.767782] env[69982]: INFO nova.compute.manager [req-ed0f347f-78c6-4439-988d-e5347bbb27aa req-ea5862b9-4c18-4696-91de-c8a2e5879bd0 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Neutron deleted interface 0f022a40-3503-463d-a390-efa4ea123ef8; detaching it from the instance and deleting it from the info cache [ 975.767782] env[69982]: DEBUG nova.network.neutron [req-ed0f347f-78c6-4439-988d-e5347bbb27aa req-ea5862b9-4c18-4696-91de-c8a2e5879bd0 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.801062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-73b840a5-ec50-43a1-8002-9e2ce590d3d0 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.061s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.920598] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-898c8f7d-4ce5-47bf-865c-87a4af6c7bbd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.933705] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5edccada-88f1-4017-8782-77314b7b4827 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.985192] env[69982]: INFO nova.compute.manager [-] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Took 1.74 seconds to deallocate network for instance. [ 975.986043] env[69982]: DEBUG nova.compute.manager [req-b86375a9-0942-4cf9-bcb2-5e75a8128005 req-b927bc20-6333-4356-9b66-64d44e1e8f5d service nova] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Detach interface failed, port_id=46457931-d8c0-40b0-864b-9a4b8489c524, reason: Instance 5b79fc38-ace3-4f94-8d1c-b77912f44a1d could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 976.008431] env[69982]: DEBUG nova.network.neutron [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.027544] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864988, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.039574] env[69982]: ERROR nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [req-bdc5bea8-449c-42ea-98ca-dcb04685d5a3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bdc5bea8-449c-42ea-98ca-dcb04685d5a3"}]} [ 976.058036] env[69982]: DEBUG nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 976.082019] env[69982]: DEBUG nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 976.082374] env[69982]: DEBUG nova.compute.provider_tree [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.098099] env[69982]: DEBUG nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 976.128516] env[69982]: DEBUG nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Refreshing trait associations for resource provider 
206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 976.233193] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520c05ba-f5d4-612c-5912-afca40a690b3, 'name': SearchDatastore_Task, 'duration_secs': 0.030871} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.234200] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9212b959-6f90-4c5b-9d3a-6b72b120e96f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.240471] env[69982]: DEBUG nova.network.neutron [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.259786] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 976.259786] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e48484-a134-ebba-a347-0ee787e18a4a" [ 976.259786] env[69982]: _type = "Task" [ 976.259786] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.260108] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864991, 'name': Rename_Task, 'duration_secs': 0.240565} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.263177] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.267868] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89fb0753-9a56-455f-b369-68c59620b52a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.271861] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86b01d07-5ff6-4b42-baf0-4973cfc7f4e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.277060] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e48484-a134-ebba-a347-0ee787e18a4a, 'name': SearchDatastore_Task, 'duration_secs': 0.018326} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.281530] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.281834] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 976.282569] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 976.282569] env[69982]: value = "task-3864993" [ 976.282569] env[69982]: _type = "Task" [ 976.282569] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.282997] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3fcd4288-471e-4db2-93b2-b4304c30bec0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.295114] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5174a84f-aed6-46d7-b539-3125230b003b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.312050] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 976.312050] env[69982]: value = "task-3864994" [ 976.312050] env[69982]: _type = "Task" [ 976.312050] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.322144] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864993, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.344845] env[69982]: DEBUG nova.compute.manager [req-ed0f347f-78c6-4439-988d-e5347bbb27aa req-ea5862b9-4c18-4696-91de-c8a2e5879bd0 service nova] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Detach interface failed, port_id=0f022a40-3503-463d-a390-efa4ea123ef8, reason: Instance 43a8e7dd-843b-49f6-9edb-60c2b380e9c2 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 976.354847] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.494550] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.509970] env[69982]: INFO nova.compute.manager [-] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Took 2.12 seconds to deallocate network for instance. [ 976.529523] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3864988, 'name': CloneVM_Task, 'duration_secs': 1.739158} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.533023] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created linked-clone VM from snapshot [ 976.533023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cb7cea-1cb6-43d8-ad9d-bea3ad0f3c02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.538923] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploading image 8433b49c-669a-4eb6-869b-79df8bf1db49 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 976.574147] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 976.574147] env[69982]: value = "vm-768019" [ 976.574147] env[69982]: _type = "VirtualMachine" [ 976.574147] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 976.575980] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ed1f8456-9da7-4ae9-b53e-72666cfebc33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.589158] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease: (returnval){ [ 976.589158] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52265e5f-f29f-8453-796e-16394e3a16ff" [ 976.589158] env[69982]: _type = "HttpNfcLease" [ 976.589158] env[69982]: } obtained for exporting VM: (result){ [ 976.589158] env[69982]: value = "vm-768019" [ 976.589158] env[69982]: _type = "VirtualMachine" [ 976.589158] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 976.590521] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the lease: (returnval){ [ 976.590521] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52265e5f-f29f-8453-796e-16394e3a16ff" [ 976.590521] env[69982]: _type = "HttpNfcLease" [ 976.590521] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 976.600581] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 976.600581] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52265e5f-f29f-8453-796e-16394e3a16ff" [ 976.600581] env[69982]: _type = "HttpNfcLease" [ 976.600581] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 976.734245] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e77a80-b242-480a-a519-1d6b37a33963 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.744411] env[69982]: INFO nova.compute.manager [-] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Took 1.40 seconds to deallocate network for instance. [ 976.747203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c27dadc-b1ab-43e8-a381-04a0df6959e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.795023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111cd781-4bfa-4dd9-995f-d68366b4a501 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.808427] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe515a3-e2b1-48a7-892a-cf11f0ea8c4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.813088] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864993, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.823988] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864994, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.834058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5100234f-ea02-40bf-b883-fa9a159c7637" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.834355] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.834635] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.834859] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.835114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.837131] env[69982]: DEBUG nova.compute.provider_tree [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 976.838777] env[69982]: INFO nova.compute.manager [None 
req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Terminating instance [ 977.017488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.102345] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 977.102345] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52265e5f-f29f-8453-796e-16394e3a16ff" [ 977.102345] env[69982]: _type = "HttpNfcLease" [ 977.102345] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 977.102708] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 977.102708] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52265e5f-f29f-8453-796e-16394e3a16ff" [ 977.102708] env[69982]: _type = "HttpNfcLease" [ 977.102708] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 977.103464] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0369382f-dd3a-4dea-b8a5-1ad8d2694b74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.112214] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 977.112446] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk for reading. 
{{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 977.219706] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bb58efc7-417f-421f-9bf4-821e361241b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.265009] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.306472] env[69982]: DEBUG oslo_vmware.api [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3864993, 'name': PowerOnVM_Task, 'duration_secs': 0.599709} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.306756] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.306960] env[69982]: INFO nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Took 10.43 seconds to spawn the instance on the hypervisor. [ 977.307150] env[69982]: DEBUG nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 977.307965] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb12d2f-520f-4532-9db2-f76867a53fc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.326654] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.841905} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.326908] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 977.327251] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 977.327447] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0386631e-74be-4bf2-a456-f05cfe89d6fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.334657] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 977.334657] env[69982]: value = "task-3864998" [ 977.334657] env[69982]: _type = "Task" [ 977.334657] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.342796] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864998, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.348061] env[69982]: DEBUG nova.compute.manager [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 977.348299] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.349318] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f66698-7c96-4397-bb51-58850a173c3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.357227] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.357555] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ed5ad33-d992-4896-a023-9ea6e724f69d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.388404] env[69982]: DEBUG nova.scheduler.client.report [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 977.388404] env[69982]: DEBUG nova.compute.provider_tree [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 102 to 103 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 977.388404] env[69982]: DEBUG nova.compute.provider_tree [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 977.433075] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Unregistered the VM {{(pid=69982) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.433204] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.433333] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore1] 5100234f-ea02-40bf-b883-fa9a159c7637 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.433673] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb132137-8e3b-4efc-9f01-1f5bf84148ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.441686] env[69982]: DEBUG oslo_vmware.api [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 977.441686] env[69982]: value = "task-3865000" [ 977.441686] env[69982]: _type = "Task" [ 977.441686] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.452093] env[69982]: DEBUG oslo_vmware.api [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865000, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.456189] env[69982]: DEBUG nova.compute.manager [req-dcf069b4-cd1c-43dc-a003-f19e84dba5bf req-9aa4b14a-5954-4779-acb8-f9a220035a5f service nova] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Received event network-vif-deleted-ff1c200e-1846-4f71-8cf1-a64fa7ea9a92 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.835145] env[69982]: INFO nova.compute.manager [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Took 40.58 seconds to build instance. [ 977.851591] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3864998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195379} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.851991] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 977.852970] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a037f7-af96-4b96-beb5-d98fecc96d1f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.884843] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 977.886353] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b216179-ed80-41cf-b70f-34fc18f5d80a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.903801] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.138s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.904610] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 977.907857] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.344s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.908197] env[69982]: DEBUG nova.objects.instance [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lazy-loading 'resources' on Instance uuid a4064177-051b-4ec8-a1fc-fa5d299add8b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.918114] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 977.918114] env[69982]: value = "task-3865001" [ 977.918114] env[69982]: _type = "Task" [ 977.918114] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.932603] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865001, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.953553] env[69982]: DEBUG oslo_vmware.api [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236531} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.954523] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 977.954756] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 977.954942] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 977.955151] env[69982]: INFO nova.compute.manager [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Took 0.61 seconds to destroy the instance on the hypervisor. [ 977.955546] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.959022] env[69982]: DEBUG nova.compute.manager [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 977.959022] env[69982]: DEBUG nova.network.neutron [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 978.341600] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4c187e2e-b9a6-44b4-b603-570965abfd0b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.238s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.412758] env[69982]: DEBUG nova.compute.utils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 978.419537] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 978.419857] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 978.445299] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.498365] env[69982]: DEBUG nova.policy [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493da3ee04094ba4ac17893d999ac99e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc588ded27b49d4826535649105aa88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 978.500195] env[69982]: DEBUG nova.compute.manager [req-9f10045b-331f-4874-bf91-abb9d6e53df9 req-f011cd69-49c4-4198-a30d-20bd7f2432fb service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Received event network-vif-deleted-5e45ab8d-a115-4ed7-a813-da3d3003694f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 978.500663] env[69982]: INFO nova.compute.manager [req-9f10045b-331f-4874-bf91-abb9d6e53df9 req-f011cd69-49c4-4198-a30d-20bd7f2432fb service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Neutron deleted interface 5e45ab8d-a115-4ed7-a813-da3d3003694f; detaching it from the instance and deleting it from the info cache [ 978.503477] env[69982]: DEBUG nova.network.neutron [req-9f10045b-331f-4874-bf91-abb9d6e53df9 req-f011cd69-49c4-4198-a30d-20bd7f2432fb service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.856673] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Successfully created port: 5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 978.921053] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 978.939877] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865001, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.946924] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.947180] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.947459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.947833] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.948307] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.955865] env[69982]: INFO nova.compute.manager [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Terminating instance [ 978.978432] env[69982]: DEBUG nova.network.neutron [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 979.009448] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-23b32707-f66f-4b5c-9024-0d02bb191b3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.023196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47f66d4-1ee6-428c-8be6-99743f2d30b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.040271] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa7afcd-8808-446c-afae-18a1d3c0d927 {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.050323] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbaf4e0-765d-4942-a726-ba6cbd26829a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.107413] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b8b041-733f-4770-b8bb-3e5b82fd992d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.110626] env[69982]: DEBUG nova.compute.manager [req-9f10045b-331f-4874-bf91-abb9d6e53df9 req-f011cd69-49c4-4198-a30d-20bd7f2432fb service nova] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Detach interface failed, port_id=5e45ab8d-a115-4ed7-a813-da3d3003694f, reason: Instance 5100234f-ea02-40bf-b883-fa9a159c7637 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 979.118179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3200cae1-2ac1-4bff-8819-d60398ceafdd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.135593] env[69982]: DEBUG nova.compute.provider_tree [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.443527] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865001, 'name': ReconfigVM_Task, 'duration_secs': 1.232268} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.443527] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 979.443527] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-710f7ce6-680c-4734-ac06-993247abf1eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.452785] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 979.452785] env[69982]: value = "task-3865002" [ 979.452785] env[69982]: _type = "Task" [ 979.452785] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.465853] env[69982]: DEBUG nova.compute.manager [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.466241] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.468217] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865002, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.468217] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e18bf59-daac-4fa4-991c-3397062c333c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.477233] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.477233] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-66c4715c-ad17-430a-abc8-0a3b4c2f5d98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.483029] env[69982]: INFO nova.compute.manager [-] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Took 1.52 seconds to deallocate network for instance. 
[ 979.485407] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 979.485407] env[69982]: value = "task-3865003" [ 979.485407] env[69982]: _type = "Task" [ 979.485407] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.504823] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865003, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.689970] env[69982]: DEBUG nova.scheduler.client.report [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 103 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 979.689970] env[69982]: DEBUG nova.compute.provider_tree [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 103 to 104 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 979.689970] env[69982]: DEBUG nova.compute.provider_tree [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.940403] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 979.969119] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865002, 'name': Rename_Task, 'duration_secs': 0.215184} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.969248] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6feb5bb4f88c954a7b0802353e512433',container_format='bare',created_at=2025-05-07T07:09:56Z,direct_url=,disk_format='vmdk',id=b87792c6-b066-467e-ada6-8dd52594cca2,min_disk=1,min_ram=0,name='tempest-test-snap-887802694',owner='9cc588ded27b49d4826535649105aa88',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-05-07T07:10:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.969629] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.972297] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.972580] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Got 1 possible 
topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.972580] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.972580] env[69982]: DEBUG nova.virt.hardware [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.972580] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 979.972759] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c452a6c7-b153-40e5-b730-226987fd2ea1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.975609] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-506598fd-0e46-4961-bd6b-6ad7dfd96807 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.985289] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de07f783-4442-480f-8145-7ce95e5299d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.989595] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 979.989595] env[69982]: value = "task-3865004" [ 979.989595] env[69982]: _type = "Task" [ 979.989595] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.007911] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.014756] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865003, 'name': PowerOffVM_Task, 'duration_secs': 0.261263} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.018299] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.018500] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 980.018799] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865004, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.019068] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7674e9a-286e-4ab6-9d60-579e2d2454e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.087998] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 980.088286] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 980.088428] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore1] 56ad2f3f-c24f-446e-8df7-09fde60ba6cd {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 980.088814] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4821f37-d4c1-40da-aef5-12f0d5608e29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.096527] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 980.096527] env[69982]: value = "task-3865006" [ 980.096527] env[69982]: _type = "Task" [ 980.096527] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.106369] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865006, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.197899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.290s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.200546] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.173s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.201466] env[69982]: DEBUG nova.objects.instance [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 980.252176] env[69982]: INFO nova.scheduler.client.report [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Deleted allocations for instance a4064177-051b-4ec8-a1fc-fa5d299add8b [ 980.504209] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865004, 'name': PowerOnVM_Task} progress is 76%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.574102] env[69982]: DEBUG nova.compute.manager [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Received event network-vif-plugged-5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.574352] env[69982]: DEBUG oslo_concurrency.lockutils [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] Acquiring lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.575031] env[69982]: DEBUG oslo_concurrency.lockutils [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] Lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.575031] env[69982]: DEBUG oslo_concurrency.lockutils [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] Lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.575031] env[69982]: DEBUG nova.compute.manager [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] No waiting events found dispatching network-vif-plugged-5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 980.576855] env[69982]: WARNING nova.compute.manager [req-60e781e4-8166-4e4f-bf4c-975b018ed803 req-9b2aa336-9f3d-45fb-912b-04381f5c0cc2 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Received unexpected event network-vif-plugged-5664c82e-ad37-48c8-b71c-799c16942ae8 for instance with vm_state building and task_state spawning. [ 980.607820] env[69982]: DEBUG oslo_vmware.api [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44912} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.609693] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.609693] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.609693] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.609693] env[69982]: INFO nova.compute.manager [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 980.609693] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.609693] env[69982]: DEBUG nova.compute.manager [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.610118] env[69982]: DEBUG nova.network.neutron [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 980.704049] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Successfully updated port: 5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 980.770597] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b266a457-40d9-4014-bc9d-6e5934593560 tempest-ServerRescueTestJSON-312521432 tempest-ServerRescueTestJSON-312521432-project-member] Lock "a4064177-051b-4ec8-a1fc-fa5d299add8b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.246s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.899424] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.899580] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.988940] env[69982]: DEBUG nova.compute.manager [req-d3e2ecbf-6439-4b57-b779-0efe4fc9a61d req-7a11619e-66c3-43f6-83a9-ab0bf0c42581 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Received event network-vif-deleted-73bae6c7-ad78-4ee7-921b-f3fa33dbae08 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.989455] env[69982]: INFO nova.compute.manager [req-d3e2ecbf-6439-4b57-b779-0efe4fc9a61d req-7a11619e-66c3-43f6-83a9-ab0bf0c42581 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Neutron deleted interface 73bae6c7-ad78-4ee7-921b-f3fa33dbae08; detaching it from the instance and deleting it from the info cache [ 980.989455] env[69982]: DEBUG nova.network.neutron [req-d3e2ecbf-6439-4b57-b779-0efe4fc9a61d req-7a11619e-66c3-43f6-83a9-ab0bf0c42581 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.004845] env[69982]: DEBUG oslo_vmware.api [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865004, 'name': PowerOnVM_Task, 'duration_secs': 0.898839} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.005962] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.006284] env[69982]: INFO nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Took 9.60 seconds to spawn the instance on the hypervisor. 
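The entries above and below show the oslo.vmware task-polling pattern: nova-compute invokes a vCenter task (PowerOnVM_Task, DeleteDatastoreFile_Task, CreateVM_Task) through the shared VMwareAPISession and then blocks in wait_for_task(), which polls the task object and emits the "Task: {...} completed successfully" lines seen here. A minimal sketch of that pattern follows; the host, credentials and managed-object value are placeholders for illustration, not values taken from this log:

    # Sketch of the oslo.vmware invoke/poll pattern seen in the surrounding
    # entries; host, credentials and the VM moref value are placeholders.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a reference to an existing VM and power it on, then block until
    # the task finishes; wait_for_task() polls the task and logs progress,
    # which is what produces the PowerOnVM_Task progress/duration lines.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
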
[ 981.007028] env[69982]: DEBUG nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.008189] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cda8d13e-aa18-48cf-a895-d49245fb3f15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.206424] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.206424] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.206424] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 981.214255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c39faada-dda2-42f6-a99d-5c54558ef466 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.214989] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 27.610s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.408540] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 981.469156] env[69982]: DEBUG nova.network.neutron [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.493522] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21c821a9-35c8-4ffa-acf0-9f445ce5edbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.505314] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e752631-7e76-4d54-944f-4727e2c43a91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.548127] env[69982]: DEBUG nova.compute.manager [req-d3e2ecbf-6439-4b57-b779-0efe4fc9a61d req-7a11619e-66c3-43f6-83a9-ab0bf0c42581 service nova] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Detach interface failed, port_id=73bae6c7-ad78-4ee7-921b-f3fa33dbae08, reason: Instance 56ad2f3f-c24f-446e-8df7-09fde60ba6cd could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 981.548879] env[69982]: INFO nova.compute.manager [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Took 40.67 seconds to build instance. [ 981.766542] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 981.936565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.974601] env[69982]: INFO nova.compute.manager [-] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Took 1.36 seconds to deallocate network for instance. 
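The repeated "Acquiring lock ... by ...", "Lock ... acquired ... :: waited" and "Lock ... released ... :: held" triples throughout this run are emitted by oslo.concurrency's lock helpers around nova's critical sections (compute_resources, per-instance event locks, refresh_cache locks). A minimal sketch of that usage, assuming a plain in-process lock; the lock names and function below are illustrative, not copied from nova:

    # Sketch of the oslo.concurrency locking that produces the
    # acquire/acquired/released DEBUG triples; names are illustrative.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named in-process lock held; lockutils logs the
        # wait time on entry and the hold time on release at DEBUG level.
        pass

    # Equivalent context-manager form, e.g. for a per-instance cache lock:
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass
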
[ 982.053289] env[69982]: DEBUG oslo_concurrency.lockutils [None req-db01119b-a040-48a3-9296-09f785b9c51c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.188s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.082165] env[69982]: DEBUG nova.network.neutron [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Updating instance_info_cache with network_info: [{"id": "5664c82e-ad37-48c8-b71c-799c16942ae8", "address": "fa:16:3e:9c:bf:de", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5664c82e-ad", "ovs_interfaceid": "5664c82e-ad37-48c8-b71c-799c16942ae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.302620] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23bb2397-2f21-46fc-9096-ad353c2e0c47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.312517] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315f10ce-39a6-4cdc-b665-b03e68c61454 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.352513] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceae6269-5e63-4389-8b7f-7194755a86d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.361603] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57386c5-6496-4f18-a1d9-9e1a83e33cf7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.377704] env[69982]: DEBUG nova.compute.provider_tree [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.482476] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.585403] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.585849] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Instance network_info: |[{"id": "5664c82e-ad37-48c8-b71c-799c16942ae8", "address": "fa:16:3e:9c:bf:de", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5664c82e-ad", "ovs_interfaceid": "5664c82e-ad37-48c8-b71c-799c16942ae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 982.586337] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:bf:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31a7f15-a808-4199-9071-31fd05e316ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5664c82e-ad37-48c8-b71c-799c16942ae8', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 982.595381] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.595671] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 982.595932] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a617f10-32b7-4931-9978-c37d8da09217 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.617125] env[69982]: DEBUG nova.compute.manager [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Received event network-changed-5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 982.617279] env[69982]: DEBUG nova.compute.manager [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Refreshing instance network info cache due to event network-changed-5664c82e-ad37-48c8-b71c-799c16942ae8. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 982.617790] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Acquiring lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.617955] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Acquired lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.618160] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Refreshing network info cache for port 5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 982.625620] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 982.625620] env[69982]: value = "task-3865008" [ 982.625620] env[69982]: _type = "Task" [ 982.625620] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.636291] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865008, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.763730] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 982.764260] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768021', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'name': 'volume-a121791e-c716-401f-b84f-37fb82a9d242', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '331f218a-ad6b-4417-b56d-83113e0c92cb', 'attached_at': '', 'detached_at': '', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'serial': 'a121791e-c716-401f-b84f-37fb82a9d242'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 982.766176] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51690ab2-93b1-4d5f-97b7-4de648e8b8dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.797204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f684e7a-4268-4dab-8839-bf408423d784 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.830073] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] volume-a121791e-c716-401f-b84f-37fb82a9d242/volume-a121791e-c716-401f-b84f-37fb82a9d242.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.830954] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89f5811b-b63f-4c80-b8ee-3ceb8f8f10a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.855254] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 982.855254] env[69982]: value = "task-3865009" [ 982.855254] env[69982]: _type = "Task" [ 982.855254] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.866816] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865009, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.887021] env[69982]: DEBUG nova.scheduler.client.report [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.136333] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865008, 'name': CreateVM_Task, 'duration_secs': 0.457464} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.136703] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 983.137500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.137668] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.138072] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 983.138343] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba3cee1-58f6-4022-8184-e48682ba0cd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.143868] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 983.143868] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d4a3e-0ce7-7c9d-203c-6b924318310e" [ 983.143868] env[69982]: _type = "Task" [ 983.143868] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.157302] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520d4a3e-0ce7-7c9d-203c-6b924318310e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.365839] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.382560] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Updated VIF entry in instance network info cache for port 5664c82e-ad37-48c8-b71c-799c16942ae8. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 983.382968] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Updating instance_info_cache with network_info: [{"id": "5664c82e-ad37-48c8-b71c-799c16942ae8", "address": "fa:16:3e:9c:bf:de", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5664c82e-ad", "ovs_interfaceid": "5664c82e-ad37-48c8-b71c-799c16942ae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.656094] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.656397] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Processing image b87792c6-b066-467e-ada6-8dd52594cca2 {{(pid=69982) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 983.656704] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.656936] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.657160] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 983.657446] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc45fb76-4190-4aa8-8d9e-f3b948417177 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.667989] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 983.668214] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 983.669038] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7f593a1-0688-4306-9cd8-31ae18617d2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.675342] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 983.675342] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e979ec-84a0-c99c-4327-952da45ad640" [ 983.675342] env[69982]: _type = "Task" [ 983.675342] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.684568] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e979ec-84a0-c99c-4327-952da45ad640, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.867281] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865009, 'name': ReconfigVM_Task, 'duration_secs': 0.751377} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.867699] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfigured VM instance instance-00000043 to attach disk [datastore2] volume-a121791e-c716-401f-b84f-37fb82a9d242/volume-a121791e-c716-401f-b84f-37fb82a9d242.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 983.873154] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ae0c645-7a74-4a51-aa76-c5d865f0c450 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.889016] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Releasing lock "refresh_cache-ab14774e-c834-41e9-bb3f-87722b51070e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 983.889314] env[69982]: DEBUG nova.compute.manager [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Received event network-changed-73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 983.889548] env[69982]: DEBUG nova.compute.manager [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Refreshing instance network info cache due to event network-changed-73909075-891e-4fc4-a912-c3757fcda156. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 983.889781] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Acquiring lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.890086] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Acquired lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 983.890291] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Refreshing network info cache for port 73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 983.893182] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 983.893182] env[69982]: value = "task-3865010" [ 983.893182] env[69982]: _type = "Task" [ 983.893182] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.899335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.685s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.908343] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.005s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.908593] env[69982]: DEBUG nova.objects.instance [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lazy-loading 'resources' on Instance uuid 10a4294b-39ce-4643-98b5-71ac283f05f5 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 983.909862] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865010, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.187943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 984.188382] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Fetch image to [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b/OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 984.188497] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Downloading stream optimized image b87792c6-b066-467e-ada6-8dd52594cca2 to [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b/OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b.vmdk on the data store datastore1 as vApp {{(pid=69982) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 984.188629] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Downloading image file data b87792c6-b066-467e-ada6-8dd52594cca2 to the ESX as VM named 'OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b' {{(pid=69982) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 984.273839] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 984.273839] env[69982]: value = "resgroup-9" [ 984.273839] env[69982]: _type = "ResourcePool" [ 984.273839] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 984.274191] env[69982]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b0388f58-f673-4997-ba97-fa5e3ae6e685 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.296442] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease: (returnval){ [ 984.296442] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 984.296442] env[69982]: _type = "HttpNfcLease" [ 984.296442] env[69982]: } obtained for vApp import into resource pool (val){ [ 984.296442] env[69982]: value = "resgroup-9" [ 984.296442] env[69982]: _type = "ResourcePool" [ 984.296442] env[69982]: }. 
{{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 984.296712] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the lease: (returnval){ [ 984.296712] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 984.296712] env[69982]: _type = "HttpNfcLease" [ 984.296712] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 984.304465] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.304465] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 984.304465] env[69982]: _type = "HttpNfcLease" [ 984.304465] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 984.419040] env[69982]: DEBUG oslo_vmware.api [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865010, 'name': ReconfigVM_Task, 'duration_secs': 0.240255} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.421379] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768021', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'name': 'volume-a121791e-c716-401f-b84f-37fb82a9d242', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '331f218a-ad6b-4417-b56d-83113e0c92cb', 'attached_at': '', 'detached_at': '', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'serial': 'a121791e-c716-401f-b84f-37fb82a9d242'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 984.496198] env[69982]: INFO nova.scheduler.client.report [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocation for migration 18bb5912-7e73-47de-bffe-7728a8253c69 [ 984.698068] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updated VIF entry in instance network info cache for port 73909075-891e-4fc4-a912-c3757fcda156. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 984.698464] env[69982]: DEBUG nova.network.neutron [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updating instance_info_cache with network_info: [{"id": "73909075-891e-4fc4-a912-c3757fcda156", "address": "fa:16:3e:52:ee:b1", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73909075-89", "ovs_interfaceid": "73909075-891e-4fc4-a912-c3757fcda156", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.806871] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.806871] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 984.806871] env[69982]: _type = "HttpNfcLease" [ 984.806871] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 984.952188] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e31d31-5b71-4738-813d-1f553b728691 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.960692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f656134f-81ef-471b-9808-5b731f86f041 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.001470] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bff7ed2-0123-40da-af97-a26e26e894d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.006028] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b77506a3-ff59-43a9-a958-bf54ad585289 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 34.576s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.013944] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9857db99-a4f0-4568-9124-779fed5f18cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.030690] env[69982]: DEBUG nova.compute.provider_tree [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.204098] env[69982]: DEBUG oslo_concurrency.lockutils [req-fad9ecc3-fca3-4025-837b-9403d6e680da req-17ae4623-0ef7-4d1c-9f02-553e6dc12cd5 service nova] Releasing lock "refresh_cache-8518f3c8-738d-468a-9f57-de50e4e67108" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 985.306502] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.306502] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 985.306502] env[69982]: _type = "HttpNfcLease" [ 985.306502] env[69982]: } is initializing. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 985.468447] env[69982]: DEBUG nova.objects.instance [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'flavor' on Instance uuid 331f218a-ad6b-4417-b56d-83113e0c92cb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 985.534079] env[69982]: DEBUG nova.scheduler.client.report [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.807260] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 985.807260] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 985.807260] env[69982]: _type = "HttpNfcLease" [ 985.807260] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 985.871055] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.871182] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.871418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.871620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.871784] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.873976] env[69982]: INFO nova.compute.manager [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Terminating instance [ 985.974448] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a691f17e-7f11-4d04-b591-3937d0c93603 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.371s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.039943] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.042613] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.307s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.042900] env[69982]: DEBUG nova.objects.instance [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lazy-loading 'resources' on Instance uuid a9a14fa8-7f58-48f9-994d-b5063833a81b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 986.072822] env[69982]: INFO nova.scheduler.client.report [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted allocations for instance 10a4294b-39ce-4643-98b5-71ac283f05f5 [ 986.292412] env[69982]: INFO nova.compute.manager [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Rescuing [ 986.292776] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.292892] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock 
"refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.293093] env[69982]: DEBUG nova.network.neutron [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 986.306221] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 986.306221] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 986.306221] env[69982]: _type = "HttpNfcLease" [ 986.306221] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 986.379238] env[69982]: DEBUG nova.compute.manager [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 986.379480] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 986.380421] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d503ff92-14bd-43f1-91bf-8b09d0fe3e24 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.388415] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.388680] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-734faf29-77b3-4729-8536-7d0c1b2e17fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.396068] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 986.396068] env[69982]: value = "task-3865012" [ 986.396068] env[69982]: _type = "Task" [ 986.396068] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.406199] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865012, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.583752] env[69982]: DEBUG oslo_concurrency.lockutils [None req-892d1a70-d059-42c6-8c4b-0169714f9567 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "10a4294b-39ce-4643-98b5-71ac283f05f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.305s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.806363] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 986.806363] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 986.806363] env[69982]: _type = "HttpNfcLease" [ 986.806363] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 986.809942] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 986.809942] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f24e12-bfbb-7979-a055-7c955e3fa08c" [ 986.809942] env[69982]: _type = "HttpNfcLease" [ 986.809942] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 986.811417] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5593d002-b400-4b2c-8628-7ba85c6c96c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.822654] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 986.822861] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk. {{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 986.888657] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1df67fa4-ac3b-42e3-a36f-1cc4a000ea33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.908892] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865012, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.043026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed062bf7-69c1-4e46-bba1-d9be8f78c607 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.051196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951a8533-2fc5-4df5-87e0-d6bdf60d50b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.088089] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd442c93-3dfd-4004-9061-9229fad13b6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.097649] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7f4b94-aed8-4f98-99ab-af5b2e88ade9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.114447] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.202064] env[69982]: DEBUG nova.network.neutron [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 987.406786] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865012, 'name': PowerOffVM_Task, 'duration_secs': 0.610479} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.407079] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.407250] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 987.407513] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9dc4ed6c-be2e-4534-a69c-06dbbd120b19 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.497940] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 987.498184] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 987.498378] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore2] 9b733e1e-0532-4d91-a460-6b1f1971f388 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 987.498654] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53f9e831-2f02-4673-9227-67ae115a9ce1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.506799] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 987.506799] env[69982]: value = "task-3865014" [ 987.506799] env[69982]: _type = "Task" [ 987.506799] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.515108] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865014, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.644098] env[69982]: ERROR nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [req-a2dd8780-a07b-41d2-b315-5578ceb012ab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a2dd8780-a07b-41d2-b315-5578ceb012ab"}]} [ 987.665782] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 987.688313] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 987.688313] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 987.702309] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 
tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 987.704706] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.736056] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 987.956443] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 987.957452] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb7eb44-ecf7-4f4c-86b4-60a3bf521967 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.967666] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 987.967881] env[69982]: ERROR oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk due to incomplete transfer. [ 987.968182] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-98328524-6946-4594-82b9-e37da54efe7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.018267] env[69982]: DEBUG oslo_vmware.api [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202698} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.021085] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 988.021300] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 988.021509] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 988.021703] env[69982]: INFO nova.compute.manager [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Took 1.64 seconds to destroy the instance on the hypervisor. [ 988.021990] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.022484] env[69982]: DEBUG nova.compute.manager [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 988.022727] env[69982]: DEBUG nova.network.neutron [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 988.212267] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79043966-81ef-4a23-83ca-8d62e92f5058 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.220734] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a0e1cb-01c3-4da0-937d-cfb400556d22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.258376] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8169c7a-b622-49b4-9cca-d715ff59923e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.268088] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd47a7b-261a-4d44-a3e8-c37aac258d1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.283794] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 988.303137] env[69982]: DEBUG oslo_vmware.rw_handles [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e9d338-149f-8db7-8037-bf2f509a2ddd/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 988.303373] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploaded image 8433b49c-669a-4eb6-869b-79df8bf1db49 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 988.309245] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 988.309245] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2bdcf3f2-f931-42e2-a1a9-b2ea6efc02e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.315306] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 988.315306] env[69982]: value = "task-3865015" [ 988.315306] env[69982]: _type = "Task" [ 988.315306] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.326949] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865015, 'name': Destroy_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.447310] env[69982]: DEBUG nova.compute.manager [req-ca1c9e06-0088-43e7-8406-90d5539ea6c6 req-43a48feb-b58a-4311-af46-d1be2a8828d5 service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Received event network-vif-deleted-f9356a59-fa6f-4664-b5ff-4a2609f506c3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.447610] env[69982]: INFO nova.compute.manager [req-ca1c9e06-0088-43e7-8406-90d5539ea6c6 req-43a48feb-b58a-4311-af46-d1be2a8828d5 service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Neutron deleted interface f9356a59-fa6f-4664-b5ff-4a2609f506c3; detaching it from the instance and deleting it from the info cache [ 988.447954] env[69982]: DEBUG nova.network.neutron [req-ca1c9e06-0088-43e7-8406-90d5539ea6c6 req-43a48feb-b58a-4311-af46-d1be2a8828d5 service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.653107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.653412] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.660964] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Completed reading data from the image iterator. {{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 988.660964] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 988.663613] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189e6dd8-100b-4e4d-bba0-c72732a74ab0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.672095] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk is in state: ready. 
{{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 988.672292] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 988.672619] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-95f58623-6029-4e91-baa8-92c69fcbe068 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.811878] env[69982]: ERROR nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [req-fb4de589-6566-4614-8ac0-c6f85c7ba14d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fb4de589-6566-4614-8ac0-c6f85c7ba14d"}]} [ 988.825255] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865015, 'name': Destroy_Task, 'duration_secs': 0.50856} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.825519] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroyed the VM [ 988.825597] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 988.825868] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3b83fe75-40ad-4158-9b80-499f89369864 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.829853] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 988.833550] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 988.833550] env[69982]: value = "task-3865016" [ 988.833550] env[69982]: _type = "Task" [ 988.833550] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.842818] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.845889] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 988.846192] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 44, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 988.860037] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 988.873861] env[69982]: DEBUG nova.network.neutron [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.885903] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 988.951353] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b373a78c-42da-412d-9924-6bdf81f5211f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.964812] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2da8fb-55a3-4b52-b38a-1e4c681e801e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.007415] env[69982]: DEBUG nova.compute.manager [req-ca1c9e06-0088-43e7-8406-90d5539ea6c6 req-43a48feb-b58a-4311-af46-d1be2a8828d5 service nova] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] 
Detach interface failed, port_id=f9356a59-fa6f-4664-b5ff-4a2609f506c3, reason: Instance 9b733e1e-0532-4d91-a460-6b1f1971f388 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 989.009900] env[69982]: DEBUG oslo_vmware.rw_handles [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f76951-067d-a13d-b284-55adb702558f/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 989.010265] env[69982]: INFO nova.virt.vmwareapi.images [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Downloaded image file data b87792c6-b066-467e-ada6-8dd52594cca2 [ 989.011154] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec1125c-7a90-4834-958d-667ad615f9c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.031800] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f036164-4711-4af5-bf19-ea3b7f7cbef9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.048298] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "8358b105-7276-4292-804d-534f9fb1535e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.048586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.048801] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "8358b105-7276-4292-804d-534f9fb1535e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.049055] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.049190] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 
tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.051277] env[69982]: INFO nova.compute.manager [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Terminating instance [ 989.074108] env[69982]: INFO nova.virt.vmwareapi.images [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] The imported VM was unregistered [ 989.076069] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 989.076315] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.079231] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7dc5fedf-2563-45ea-904e-384351f31d13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.111604] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.111854] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b/OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b.vmdk to [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk. {{(pid=69982) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 989.112191] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-cd77a77d-870b-47fe-b206-7c42fc472f9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.119984] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 989.119984] env[69982]: value = "task-3865018" [ 989.119984] env[69982]: _type = "Task" [ 989.119984] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.127686] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.128074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.128344] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.128554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.128749] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.131770] env[69982]: INFO nova.compute.manager [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Terminating instance [ 989.137215] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.156689] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 989.268417] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.268720] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d9146f69-e030-4a9f-82d9-d20c969f6795 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.273752] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.274558] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.280125] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 989.280125] env[69982]: value = "task-3865019" [ 989.280125] env[69982]: _type = "Task" [ 989.280125] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.291123] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.349984] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.376560] env[69982]: INFO nova.compute.manager [-] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Took 1.35 seconds to deallocate network for instance. 
[ 989.387841] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b71c90d-324a-4ce3-9c26-7883ad963d97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.396669] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43ab9c3-4d26-4fcc-b711-3b27b8c44bc6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.462026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b589709f-afc9-4c8f-948f-37dbe36e6f60 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.474973] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fa9ddd-4ed0-4e89-9458-e871a9694867 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.496239] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 989.555489] env[69982]: DEBUG nova.compute.manager [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.555958] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.556878] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc7e9a2-9fa3-4fdc-b1f3-a325d7adcaf0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.566894] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.567326] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-762f492e-7ef0-4285-8a7c-5d9f1896d803 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.575597] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 989.575597] env[69982]: value = "task-3865020" [ 989.575597] env[69982]: _type = "Task" [ 989.575597] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.589062] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.642297] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.643326] env[69982]: DEBUG nova.compute.manager [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.643945] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.645688] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fceaa09d-7485-4599-8f97-b472276af3a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.659432] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.659938] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f384fb23-2d5e-43d4-8327-98748b05be0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.674381] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 989.674381] env[69982]: value = "task-3865021" [ 989.674381] env[69982]: _type = "Task" [ 989.674381] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.684551] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.693115] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865021, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.789947] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.790619] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.790981] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.791229] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.791360] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.791540] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.791682] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 989.791836] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.797634] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865019, 'name': PowerOffVM_Task, 'duration_secs': 0.28307} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.798345] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.799372] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8773a51-f1ac-4c75-a180-9fb2dc76631d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.827193] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dc720d-cefb-474f-b00c-68a9f6a6f06a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.851763] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.867805] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.868280] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c39be931-2615-4439-a0df-3922c1b7e0fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.876561] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 989.876561] env[69982]: value = "task-3865022" [ 989.876561] env[69982]: _type = "Task" [ 989.876561] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.883261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.888814] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 989.889449] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.889794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.889960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.890201] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.891433] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84238b9f-b0e0-415d-b54d-5c84a00219a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.909383] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.909636] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.910688] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a85d1838-975d-4a8d-aaa5-0b81a5fbc052 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.917594] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 989.917594] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994981-2eab-c2ea-e45c-deb099736554" [ 989.917594] env[69982]: _type = "Task" [ 989.917594] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.927512] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994981-2eab-c2ea-e45c-deb099736554, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.034286] env[69982]: DEBUG nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 990.034588] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 107 to 108 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 990.034770] env[69982]: DEBUG nova.compute.provider_tree [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 990.086042] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865020, 
'name': PowerOffVM_Task, 'duration_secs': 0.205429} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.086305] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.086475] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.086730] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fb54810d-3026-42d9-b618-0b5d56d1a147 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.135457] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.154720] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.154986] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.155114] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleting the datastore file [datastore2] 8358b105-7276-4292-804d-534f9fb1535e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.155396] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b24b0aff-c4ba-4ec5-ac9f-f2df869dba3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.168619] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 990.168619] env[69982]: value = "task-3865024" [ 990.168619] env[69982]: _type = "Task" [ 990.168619] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.179942] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.187599] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865021, 'name': PowerOffVM_Task, 'duration_secs': 0.260324} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.187849] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.188031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.188335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43235645-10a7-47a0-ba1f-72e18f35f86b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.258635] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.259114] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.259530] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleting the datastore file [datastore2] bba6f430-5af5-4d8a-9cf4-082207c170a5 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.259915] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea3cd153-5fda-40b1-a64a-275766c48e29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.267822] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for the task: (returnval){ [ 
990.267822] env[69982]: value = "task-3865026" [ 990.267822] env[69982]: _type = "Task" [ 990.267822] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.276968] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865026, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.299265] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.349960] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.428763] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52994981-2eab-c2ea-e45c-deb099736554, 'name': SearchDatastore_Task, 'duration_secs': 0.09548} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.429582] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e865548-94f7-4b2c-bc06-bd7c40557d54 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.435559] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 990.435559] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522d37e3-79a9-369b-870f-576d312234d5" [ 990.435559] env[69982]: _type = "Task" [ 990.435559] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.446825] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522d37e3-79a9-369b-870f-576d312234d5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.540473] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.498s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.543391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.843s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.545093] env[69982]: INFO nova.compute.claims [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 990.569661] env[69982]: INFO nova.scheduler.client.report [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleted allocations for instance a9a14fa8-7f58-48f9-994d-b5063833a81b [ 990.636249] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.680487] env[69982]: DEBUG oslo_vmware.api [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.312471} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.680870] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.681102] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.681275] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.681519] env[69982]: INFO nova.compute.manager [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 990.681746] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.681949] env[69982]: DEBUG nova.compute.manager [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.682079] env[69982]: DEBUG nova.network.neutron [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 990.780220] env[69982]: DEBUG oslo_vmware.api [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Task: {'id': task-3865026, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.502866} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.780732] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.781526] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.781742] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.782049] env[69982]: INFO nova.compute.manager [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Took 1.14 seconds to destroy the instance on the hypervisor. [ 990.782520] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.782756] env[69982]: DEBUG nova.compute.manager [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 990.783113] env[69982]: DEBUG nova.network.neutron [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 990.850411] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.952573] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522d37e3-79a9-369b-870f-576d312234d5, 'name': SearchDatastore_Task, 'duration_secs': 0.099015} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.952870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.953183] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. {{(pid=69982) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 990.953475] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-895f94d9-fb3a-4b4b-b634-7e1df4cead0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.961477] env[69982]: DEBUG nova.compute.manager [req-ad8509e4-0120-4614-83c0-424ff30d926d req-d0e06ec8-029c-42a8-ac06-d49f22377c7e service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Received event network-vif-deleted-6e14b78a-b061-48f2-ad9b-6430822d4a48 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.961727] env[69982]: INFO nova.compute.manager [req-ad8509e4-0120-4614-83c0-424ff30d926d req-d0e06ec8-029c-42a8-ac06-d49f22377c7e service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Neutron deleted interface 6e14b78a-b061-48f2-ad9b-6430822d4a48; detaching it from the instance and deleting it from the info cache [ 990.962446] env[69982]: DEBUG nova.network.neutron [req-ad8509e4-0120-4614-83c0-424ff30d926d req-d0e06ec8-029c-42a8-ac06-d49f22377c7e service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.965136] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 990.965136] env[69982]: value = "task-3865027" [ 990.965136] env[69982]: _type = "Task" [ 990.965136] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.975986] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865027, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.078774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fd776f2f-1915-4d20-a925-e16aaba5e8a0 tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "a9a14fa8-7f58-48f9-994d-b5063833a81b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.093s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.109136] env[69982]: DEBUG nova.compute.manager [req-bf7b8ee0-bf47-4bd3-8111-cb5618c56536 req-312aa0fd-dee1-42a9-a4ce-c8101cb51df9 service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Received event network-vif-deleted-2b95faf4-6ad3-4170-a93a-15d892a8ca46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 991.109136] env[69982]: INFO nova.compute.manager [req-bf7b8ee0-bf47-4bd3-8111-cb5618c56536 req-312aa0fd-dee1-42a9-a4ce-c8101cb51df9 service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Neutron deleted interface 2b95faf4-6ad3-4170-a93a-15d892a8ca46; detaching it from the instance and deleting it from the info cache [ 991.109136] env[69982]: DEBUG nova.network.neutron [req-bf7b8ee0-bf47-4bd3-8111-cb5618c56536 req-312aa0fd-dee1-42a9-a4ce-c8101cb51df9 service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.138290] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.351872] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.432722] env[69982]: DEBUG nova.network.neutron [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.467371] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4eb19e04-493c-4f71-b64a-007d6436da49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.481508] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865027, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.486217] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee695b37-e5df-4407-b837-fa302953eca6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.524500] env[69982]: DEBUG nova.compute.manager [req-ad8509e4-0120-4614-83c0-424ff30d926d req-d0e06ec8-029c-42a8-ac06-d49f22377c7e service nova] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Detach interface failed, port_id=6e14b78a-b061-48f2-ad9b-6430822d4a48, reason: Instance 8358b105-7276-4292-804d-534f9fb1535e could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 991.558829] env[69982]: DEBUG nova.network.neutron [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.611807] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fe7151c-b49c-4013-be83-3f5098bdd52a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.624681] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e24381-2091-4693-b33a-ec584b9348cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.656272] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.674352] env[69982]: DEBUG nova.compute.manager [req-bf7b8ee0-bf47-4bd3-8111-cb5618c56536 req-312aa0fd-dee1-42a9-a4ce-c8101cb51df9 service nova] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Detach interface failed, port_id=2b95faf4-6ad3-4170-a93a-15d892a8ca46, reason: Instance bba6f430-5af5-4d8a-9cf4-082207c170a5 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 991.854920] env[69982]: DEBUG oslo_vmware.api [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865016, 'name': RemoveSnapshot_Task, 'duration_secs': 2.783528} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.855186] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 991.855653] env[69982]: INFO nova.compute.manager [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 19.45 seconds to snapshot the instance on the hypervisor. 
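The repeated RemoveSnapshot_Task records above, stuck at 17% until the completed-successfully entry with duration_secs 2.783528, are the visible half of the wait_for_task / _poll_task loop: submit the vCenter task, then poll its state on a fixed interval, logging progress until it reports success or error. A minimal stand-alone sketch of that pattern follows; get_task_info, the state names and the poll interval are illustrative stand-ins, not the oslo.vmware API.

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # Poll a vCenter-style task until it reaches a terminal state.
    # get_task_info(task_id) is assumed to return an object with
    # .state in {'running', 'success', 'error'}, .progress (0-100)
    # and .error (a message when state == 'error').
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state == 'success':
            return time.monotonic() - start   # analogous to duration_secs in the log
        if info.state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task: {task_id} progress is {info.progress}%.")  # mirrors the _poll_task DEBUG lines
        time.sleep(poll_interval)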
[ 991.935740] env[69982]: INFO nova.compute.manager [-] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Took 1.25 seconds to deallocate network for instance. [ 991.983940] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.028237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09883b41-8619-4eea-bfcf-47c64b32e038 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.037438] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf41f811-145a-482e-992a-0986b9343340 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.069863] env[69982]: INFO nova.compute.manager [-] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Took 1.29 seconds to deallocate network for instance. [ 992.073067] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f2374a-ed2b-4cca-9f39-be848a1e6d3f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.084741] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9eac594-e387-4621-83cd-fe6ad9505080 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.099913] env[69982]: DEBUG nova.compute.provider_tree [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.154693] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865018, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.007344} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.154995] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b/OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b.vmdk to [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk. 
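At this point both ListServersNegative instances (8358b105-7276-4292-804d-534f9fb1535e and bba6f430-5af5-4d8a-9cf4-082207c170a5) have gone through the same teardown order: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task on the instance directory, then network deallocation (the "Took 1.25 / 1.29 seconds to deallocate network" records above). The sketch below only outlines that ordering; `session` is a hypothetical helper exposing invoke() and wait_for_task(), not the real VMwareAPISession interface.

def destroy_instance(session, vm_ref, datastore_dir, deallocate_network):
    # Teardown order seen in the log: power off, unregister, delete the
    # instance's datastore directory, then release its networking.
    task = session.invoke('PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)                      # "Powered off the VM"

    session.invoke('UnregisterVM', vm_ref)           # removes the VM from vCenter inventory only

    task = session.invoke('DeleteDatastoreFile_Task', datastore_dir)
    session.wait_for_task(task)                      # "Deleted contents of the VM from datastore"

    deallocate_network()                             # Neutron ports detached/deleted last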
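The inventory payload reported for provider 206a5498-2e79-46c1-a636-9488a05fb67d earlier in this run, and re-checked in the "Inventory has not changed" records above, is what Placement turns into schedulable capacity: for each resource class the usable amount is (total - reserved) * allocation_ratio, with min_unit / max_unit / step_size bounding any single allocation. A short sketch reproducing that arithmetic from the logged figures (the helper name is ours; only the fields used are copied):

inventory = {  # from the set_inventory_for_provider record above
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    # Placement's effective capacity per resource class: (total - reserved) * allocation_ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio'] for rc, v in inv.items()}

print(usable_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}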
[ 992.155210] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Cleaning up location [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 992.155384] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c2cca237-f27d-43ec-a7fa-60a4456cb72b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.156041] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3920fb2-b73e-4377-a176-660e75ccc966 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.164137] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 992.164137] env[69982]: value = "task-3865029" [ 992.164137] env[69982]: _type = "Task" [ 992.164137] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.172862] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865029, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.414644] env[69982]: DEBUG nova.compute.manager [None req-d0570b0e-d4c8-447f-92ce-d9d9df6deb37 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Found 2 images (rotation: 2) {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 992.443720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.482301] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865027, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.505629} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.482682] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk. 
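The "Found 2 images (rotation: 2)" record above comes from backup rotation: after a backup-type snapshot, the manager keeps only the newest `rotation` backup images for the instance and deletes any older ones, so with two images and rotation=2 nothing is removed here. A schematic sketch of that rule, assuming each image is a dict with a created_at field (a simplification of the Glance records actually inspected):

def rotate_backups(images, rotation):
    # Keep the newest `rotation` backup images; return the excess to delete.
    ordered = sorted(images, key=lambda img: img['created_at'], reverse=True)
    print(f"Found {len(images)} images (rotation: {rotation})")
    return ordered[rotation:]

to_delete = rotate_backups(
    [{'id': 'img-1', 'created_at': '2024-01-01T00:00:00Z'},
     {'id': 'img-2', 'created_at': '2024-01-02T00:00:00Z'}],
    rotation=2)
assert to_delete == []   # matches the log: two images kept, none rotated out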
[ 992.483430] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb099bdd-1cab-46a1-99d1-7d063a23a7e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.511842] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 992.512922] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0a09cb9-8cc9-4185-8be8-21d93814c745 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.531640] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 992.531640] env[69982]: value = "task-3865030" [ 992.531640] env[69982]: _type = "Task" [ 992.531640] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.540572] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865030, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.580648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.602938] env[69982]: DEBUG nova.scheduler.client.report [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.674665] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196424} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.674947] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.675118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.675379] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk to [datastore1] ab14774e-c834-41e9-bb3f-87722b51070e/ab14774e-c834-41e9-bb3f-87722b51070e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.675649] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4f26366-7b56-4b9a-83c8-bccedbf3e10e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.683438] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 992.683438] env[69982]: value = "task-3865031" [ 992.683438] env[69982]: _type = "Task" [ 992.683438] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.693188] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.043271] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865030, 'name': ReconfigVM_Task, 'duration_secs': 0.321948} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.043590] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb/a4e69d6f-1c15-4f57-92a8-5e81c6be8172-rescue.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.044613] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f404e76-d6dc-4a72-af79-ba3bc3441aa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.075519] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c277e7ed-f6d8-489b-af20-f8ae64b8301c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.092817] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 993.092817] env[69982]: value = "task-3865032" [ 993.092817] env[69982]: _type = "Task" [ 993.092817] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.102242] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.108319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.108918] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 993.111819] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.497s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.112022] env[69982]: DEBUG nova.objects.instance [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 993.195275] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.434136] env[69982]: DEBUG nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.434619] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a5128c-a9ce-4f2c-9282-4308fa29ce04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.604131] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865032, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.617021] env[69982]: DEBUG nova.compute.utils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 993.621939] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 993.694587] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.946572] env[69982]: INFO nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] instance snapshotting [ 993.947293] env[69982]: DEBUG nova.objects.instance [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.105363] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865032, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.123053] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 994.126116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d122e681-3bbd-452e-9e10-19b9feb8397f tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.127419] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.505s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.127705] env[69982]: DEBUG nova.objects.instance [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lazy-loading 'resources' on Instance uuid 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.195053] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.455733] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1372f5c8-6916-40b8-abb9-16682efeeae3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.476755] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b219c5-65bb-4770-a4b6-554571ce1027 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.604908] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865032, 'name': ReconfigVM_Task, 'duration_secs': 1.281361} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.605309] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.605839] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee2fb3c2-cb38-407c-9b75-21f452b7aeb6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.614621] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 994.614621] env[69982]: value = "task-3865033" [ 994.614621] env[69982]: _type = "Task" [ 994.614621] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.626866] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.701775] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.991288] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 994.991288] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-24ca3c63-2c35-492e-a074-133500b4f164 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.000812] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 995.000812] env[69982]: value = "task-3865034" [ 995.000812] env[69982]: _type = "Task" [ 995.000812] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.012313] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865034, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.069340] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ca7b9f-ced8-465a-8084-66dc978e41a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.074922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.075143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.075396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.075597] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.075805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.080212] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9214c5f9-9dac-4cbc-bccc-d86c7c0b96b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.085792] env[69982]: INFO nova.compute.manager [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Terminating instance [ 995.120788] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6cadaf-c88e-4228-beac-683da4475fe3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.132943] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0334b971-d5c5-45cb-9098-6d9ce067725c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.136883] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.138608] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 995.153090] env[69982]: DEBUG nova.compute.provider_tree [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.155377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.155606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.166866] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 995.167174] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 995.167334] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 995.167515] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 995.167661] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 
tempest-ServersListShow298Test-2141021973-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 995.167806] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 995.168034] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 995.168204] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 995.168370] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 995.168534] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 995.168709] env[69982]: DEBUG nova.virt.hardware [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 995.169870] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb119de9-20f7-4ddc-9dae-08982d6c7bed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.179702] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa294d4-60ff-4335-ac34-c1bf74ff6cca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.194688] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.200850] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Creating folder: Project (4a6e64ec89f146dc99789efa264c0610). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.204212] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d13524d-9028-4712-a85e-c90ad2a44d4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.214681] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865031, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.440517} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.216065] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/b87792c6-b066-467e-ada6-8dd52594cca2/b87792c6-b066-467e-ada6-8dd52594cca2.vmdk to [datastore1] ab14774e-c834-41e9-bb3f-87722b51070e/ab14774e-c834-41e9-bb3f-87722b51070e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.216411] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Created folder: Project (4a6e64ec89f146dc99789efa264c0610) in parent group-v767796. [ 995.216579] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Creating folder: Instances. Parent ref: group-v768024. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 995.217319] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee369b9-c6db-4303-a2e6-a1fc120dbaba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.219787] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c75ca815-f583-4d7f-847b-6443712586ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.241655] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] ab14774e-c834-41e9-bb3f-87722b51070e/ab14774e-c834-41e9-bb3f-87722b51070e.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.243299] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6292038-f786-4717-ba61-ecb574bf7f64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.258179] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Created folder: Instances in parent group-v768024. 
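The folder-creation and task entries above follow the generic oslo.vmware call pattern that produces the "Invoking <method> with opID=..." and "Waiting for the task ... progress is N%" lines: a vSphere method is invoked through the API session, and methods whose names end in _Task return a Task object that the session polls until it completes. Below is a minimal illustrative sketch of that pattern using the public oslo.vmware session API; the host, credentials, folder name and managed-object references are placeholders rather than values from this log, and the code is not Nova's own implementation.

# Minimal sketch (not Nova's code) of the oslo.vmware pattern behind the
# "Invoking ..." / "Waiting for the task" / "progress is N%" entries above.
from oslo_vmware import api


def build_session(host, user, password):
    # Constructing the session logs into vCenter immediately; the host and
    # credentials passed here are illustrative placeholders.
    return api.VMwareAPISession(host, user, password,
                                api_retry_count=10, task_poll_interval=0.5)


def create_folder_and_power_on(session, parent_folder_ref, folder_name, vm_ref):
    # Folder.CreateFolder returns the new folder directly (no task), matching
    # the "Invoking Folder.CreateFolder" -> "Created folder" pairs in the log.
    folder_ref = session.invoke_api(session.vim, 'CreateFolder',
                                    parent_folder_ref, name=folder_name)

    # Methods ending in _Task (PowerOnVM_Task, CopyVirtualDisk_Task,
    # CreateSnapshot_Task, ...) return a Task reference; wait_for_task() is
    # what drives the periodic "_poll_task ... progress is N%" DEBUG lines and
    # raises if the task ends in the 'error' state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
    return folder_ref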
[ 995.258457] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.259049] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.259693] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e042bc8c-b733-48e5-aa67-e15cfc1832e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.273636] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 995.273636] env[69982]: value = "task-3865037" [ 995.273636] env[69982]: _type = "Task" [ 995.273636] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.278743] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.278743] env[69982]: value = "task-3865038" [ 995.278743] env[69982]: _type = "Task" [ 995.278743] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.285557] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865037, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.293213] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865038, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.514028] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865034, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.590102] env[69982]: DEBUG nova.compute.manager [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 995.590357] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 995.591289] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbe6f0d-2af1-48d2-8ec0-f78524a4cbda {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.599138] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.599486] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de4f1aa8-2888-40ba-abfd-5554d5b35f57 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.606174] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 995.606174] env[69982]: value = "task-3865039" [ 995.606174] env[69982]: _type = "Task" [ 995.606174] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.615040] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3865039, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.626102] env[69982]: DEBUG oslo_vmware.api [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865033, 'name': PowerOnVM_Task, 'duration_secs': 0.966996} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.626387] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.629275] env[69982]: DEBUG nova.compute.manager [None req-d16d442f-8ed8-48fe-9a83-59246d2c38e2 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.630132] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010d7658-ed38-45a8-8f86-8f58eb2245c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.658021] env[69982]: DEBUG nova.scheduler.client.report [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 995.662307] env[69982]: DEBUG nova.compute.utils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.792072] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.796441] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865038, 'name': CreateVM_Task, 'duration_secs': 0.329411} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.796674] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.797173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.797340] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.797710] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 995.797947] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e67b1caf-f352-4732-b87e-1ca9585c8282 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.803425] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 995.803425] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e5e6b-ab50-2a17-5222-9b0c2bd478d1" [ 995.803425] env[69982]: _type = "Task" [ 995.803425] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.813461] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e5e6b-ab50-2a17-5222-9b0c2bd478d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.013993] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865034, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.117863] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3865039, 'name': PowerOffVM_Task, 'duration_secs': 0.26383} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.119342] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.119342] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.119342] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2faa665e-2aad-48b6-add5-ad46cda55469 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.165162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.167870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.960s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.169559] env[69982]: INFO nova.compute.claims [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.172582] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.017s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.191935] env[69982]: INFO nova.scheduler.client.report [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted allocations for instance 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa [ 996.284328] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865037, 'name': ReconfigVM_Task, 'duration_secs': 0.814743} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.284604] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Reconfigured VM instance instance-00000050 to attach disk [datastore1] ab14774e-c834-41e9-bb3f-87722b51070e/ab14774e-c834-41e9-bb3f-87722b51070e.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.285268] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58e7a18a-0854-4a7f-a1c8-40a75c924297 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.293203] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 996.293203] env[69982]: value = "task-3865041" [ 996.293203] env[69982]: _type = "Task" [ 996.293203] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.302295] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865041, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.314717] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e5e6b-ab50-2a17-5222-9b0c2bd478d1, 'name': SearchDatastore_Task, 'duration_secs': 0.036319} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.315038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.315287] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.315529] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.315679] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.315863] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.316149] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0ba7eea-21ef-4035-bf8d-4dafeb9f8ffe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.332056] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.332276] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.333058] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39abe62e-b113-4171-b658-7ed2cd9e63a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.338957] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 996.338957] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272b519-6dbc-2082-a147-5cf7cb41eebc" [ 996.338957] env[69982]: _type = "Task" [ 996.338957] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.347809] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272b519-6dbc-2082-a147-5cf7cb41eebc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.513375] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865034, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.703258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-70596d06-f7e9-4254-84ff-d798cde153c3 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.924s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.805060] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865041, 'name': Rename_Task, 'duration_secs': 0.451951} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.805060] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.805311] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acd40da6-a414-426e-994c-d963f66df67a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.812299] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 996.812299] env[69982]: value = "task-3865042" [ 996.812299] env[69982]: _type = "Task" [ 996.812299] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.821636] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865042, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.851232] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272b519-6dbc-2082-a147-5cf7cb41eebc, 'name': SearchDatastore_Task, 'duration_secs': 0.054394} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.852154] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-976f1d8e-1bc7-4d9b-87b6-d5c255c987c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.860503] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 996.860503] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9afff-bc28-62ac-999c-e325a01e6f0b" [ 996.860503] env[69982]: _type = "Task" [ 996.860503] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.869631] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9afff-bc28-62ac-999c-e325a01e6f0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.018295] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865034, 'name': CreateSnapshot_Task, 'duration_secs': 1.588883} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.019249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 997.020414] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce026e6d-ad6f-4978-946f-c60b6beda817 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.220367] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.220604] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.220839] env[69982]: INFO nova.compute.manager [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Attaching volume 44d0e079-95f9-4407-b06b-9c0c5646922a to /dev/sdb [ 997.264643] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113085ae-a77b-4e4f-ac2f-4fbebedf0762 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.275442] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ce4e54-e4b9-4cf0-b265-a839b252bf7d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.293073] env[69982]: DEBUG nova.virt.block_device [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating existing volume attachment record: e8f59af0-18ae-4844-9e83-d141ed591e21 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 997.314925] env[69982]: INFO nova.compute.manager [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Unrescuing [ 997.315212] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.315360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.315525] env[69982]: DEBUG nova.network.neutron [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 997.328091] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865042, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.372103] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9afff-bc28-62ac-999c-e325a01e6f0b, 'name': SearchDatastore_Task, 'duration_secs': 0.010597} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.375617] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.375803] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.376234] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b37d3a49-6c57-4717-973f-267f928ec840 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.382966] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 997.382966] env[69982]: value = "task-3865043" [ 997.382966] env[69982]: _type = "Task" [ 997.382966] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.394871] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.511682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "48dbc665-8286-4d5d-af4e-1a85d1742952" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.511996] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.512416] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.512466] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.512618] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.517069] env[69982]: INFO nova.compute.manager [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Terminating instance [ 997.541242] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 997.544662] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-53d0203e-79c3-4508-82bc-374b3eb7d254 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.554474] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 997.554474] env[69982]: value = "task-3865047" [ 997.554474] env[69982]: _type = "Task" [ 997.554474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.617397] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00519643-f529-4b0c-8ca4-da262ada4f98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.627832] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8ca66f-4f0d-4c9c-b6ae-7a3edaab9749 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.666312] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed684be9-b875-4242-90bd-09f96dfa6a4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.676666] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8ef679-3fe6-4d34-90ba-8df75d891078 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.695626] env[69982]: DEBUG nova.compute.provider_tree [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.832689] env[69982]: DEBUG oslo_vmware.api [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865042, 'name': PowerOnVM_Task, 'duration_secs': 0.565208} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.833105] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.833309] env[69982]: INFO nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Took 17.90 seconds to spawn the instance on the hypervisor. 
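The recurring lock lines in this section ("Acquiring lock ... by ...", "Lock ... acquired by ... :: waited Ns", "Lock ... released by ... :: held Ns") are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on shared state such as the resource tracker and individual instances. A minimal illustrative sketch of the two forms visible here, the synchronized decorator and the lock() context manager, follows; the function names and usage are placeholders, not Nova's code.

# Minimal sketch of the oslo.concurrency locking that emits the
# acquire/waited/held DEBUG lines seen in this log; illustrative only.
import logging

from oslo_concurrency import lockutils

# lockutils logs its acquire/release events at DEBUG level.
logging.basicConfig(level=logging.DEBUG)


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # All callers sharing the lock name execute this body one at a time,
    # comparable to the "compute_resources" lock around the resource tracker.
    return 'updated %s' % instance_uuid


def terminate(instance_uuid):
    # Ad-hoc critical section keyed by the instance UUID, comparable to the
    # per-instance locks taken before an instance is destroyed.
    with lockutils.lock(instance_uuid):
        return 'terminated %s' % instance_uuid


if __name__ == '__main__':
    print(update_usage('00000000-0000-0000-0000-000000000000'))
    print(terminate('00000000-0000-0000-0000-000000000000'))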
[ 997.834585] env[69982]: DEBUG nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.835352] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374aea4c-bdf5-4a1b-8859-e3b83be0f0f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.899485] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865043, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.021399] env[69982]: DEBUG nova.compute.manager [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 998.021686] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 998.022785] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13165d3-ad8d-423c-a80e-272f808a185c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.032260] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 998.035214] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49558e23-f3fd-40ce-a755-19383dadc508 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.043595] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 998.043595] env[69982]: value = "task-3865048" [ 998.043595] env[69982]: _type = "Task" [ 998.043595] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.054192] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865048, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.066025] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865047, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.172052] env[69982]: DEBUG nova.network.neutron [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.199026] env[69982]: DEBUG nova.scheduler.client.report [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.360543] env[69982]: INFO nova.compute.manager [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Took 53.13 seconds to build instance. [ 998.396033] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.611636} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.396033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.396262] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.396478] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b3b1fea-48c6-4313-a7e9-43cf8ebb0a00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.405265] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 998.405265] env[69982]: value = "task-3865049" [ 998.405265] env[69982]: _type = "Task" [ 998.405265] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.416937] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865049, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.555062] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865048, 'name': PowerOffVM_Task, 'duration_secs': 0.240573} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.555062] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.555245] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.558387] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee8bd59c-933a-4ccf-b77d-361786c54f21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.566309] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865047, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.629697] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.629975] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.630188] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore2] 48dbc665-8286-4d5d-af4e-1a85d1742952 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.630494] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55b97363-d44a-4f0c-a476-20a82e49b33d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.637238] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 998.637238] env[69982]: value = "task-3865051" [ 998.637238] env[69982]: _type = "Task" [ 998.637238] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.646250] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865051, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.664893] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.665165] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.665361] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleting the datastore file [datastore1] 9a1de08e-3206-44cc-8d34-a5527faf9684 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.665649] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecd0938a-070b-4251-97fe-308da06b0243 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.676607] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.677343] env[69982]: DEBUG nova.objects.instance [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'flavor' on Instance uuid 331f218a-ad6b-4417-b56d-83113e0c92cb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.678946] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for the task: (returnval){ [ 998.678946] env[69982]: value = "task-3865052" [ 998.678946] env[69982]: _type = "Task" [ 998.678946] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.691556] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3865052, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.705886] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.538s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.706566] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 998.709586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.215s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.709874] env[69982]: DEBUG nova.objects.instance [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lazy-loading 'resources' on Instance uuid 5b79fc38-ace3-4f94-8d1c-b77912f44a1d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.862794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-178ae116-52b1-4b55-b767-29dc47a4dd2d tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.772s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.916274] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865049, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074174} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.916570] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.917447] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4d5a9a-4980-445d-a0d6-b48df7059095 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.931491] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "ab14774e-c834-41e9-bb3f-87722b51070e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.932047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.932047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.932241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.932399] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.942945] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.944019] env[69982]: INFO nova.compute.manager [None 
req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Terminating instance [ 998.945570] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82c02d63-0723-4d03-8a49-240d6f0b7216 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.967856] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 998.967856] env[69982]: value = "task-3865053" [ 998.967856] env[69982]: _type = "Task" [ 998.967856] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.979559] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.065042] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865047, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.149399] env[69982]: DEBUG oslo_vmware.api [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301385} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.149678] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.149926] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.150154] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.150363] env[69982]: INFO nova.compute.manager [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 999.150643] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.150852] env[69982]: DEBUG nova.compute.manager [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.150969] env[69982]: DEBUG nova.network.neutron [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 999.187726] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73101ff1-6a37-4794-8223-e3c5bfa8b254 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.198533] env[69982]: DEBUG oslo_vmware.api [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Task: {'id': task-3865052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324655} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.217017] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 999.217254] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 999.217440] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 999.217617] env[69982]: INFO nova.compute.manager [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Took 3.63 seconds to destroy the instance on the hypervisor. [ 999.217867] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.219859] env[69982]: DEBUG nova.compute.utils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 999.224225] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.227030] env[69982]: DEBUG nova.compute.manager [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 999.227030] env[69982]: DEBUG nova.network.neutron [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 999.228647] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 999.228819] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 999.230789] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c5673c1-56e5-4302-ad43-e2764e9e3977 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.240926] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 999.240926] env[69982]: value = "task-3865054" [ 999.240926] env[69982]: _type = "Task" [ 999.240926] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.258235] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865054, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.306961] env[69982]: DEBUG nova.policy [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b41a52cdf4c34d9c9b86c0debf922a36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '251f58d95d51416d9d9fd54aa14546e2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 999.463061] env[69982]: DEBUG nova.compute.manager [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 999.463333] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.464647] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bab3a6-c92e-47cf-96af-5cc578715abf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.482239] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.482547] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865053, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.482800] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c12f35c7-c10e-479e-af72-1690026cf177 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.493835] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 999.493835] env[69982]: value = "task-3865055" [ 999.493835] env[69982]: _type = "Task" [ 999.493835] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.504970] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865055, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.569731] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865047, 'name': CloneVM_Task, 'duration_secs': 1.722669} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.570032] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Created linked-clone VM from snapshot [ 999.571058] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19f16ef-85eb-4893-b5e6-d2fe0a70f2fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.583527] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploading image 8f3be3bb-91de-4918-95d8-14e1107e6dd5 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 999.645119] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 999.645119] env[69982]: value = "vm-768030" [ 999.645119] env[69982]: _type = "VirtualMachine" [ 999.645119] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 999.645477] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3aea4818-4c0a-4e4b-844a-1b38ca532289 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.657158] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease: (returnval){ [ 999.657158] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522aad64-3fb5-6422-8421-e0f7365155b0" [ 999.657158] env[69982]: _type = "HttpNfcLease" [ 999.657158] env[69982]: } obtained for exporting VM: (result){ [ 999.657158] env[69982]: value = "vm-768030" [ 999.657158] env[69982]: _type = "VirtualMachine" [ 999.657158] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 999.657564] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the lease: (returnval){ [ 999.657564] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522aad64-3fb5-6422-8421-e0f7365155b0" [ 999.657564] env[69982]: _type = "HttpNfcLease" [ 999.657564] env[69982]: } to be ready. 
{{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 999.669575] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 999.669575] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522aad64-3fb5-6422-8421-e0f7365155b0" [ 999.669575] env[69982]: _type = "HttpNfcLease" [ 999.669575] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 999.716282] env[69982]: DEBUG nova.compute.manager [req-75f00871-19f9-46de-b13e-0aec794553fa req-d42aea7e-5d82-41c5-9a72-7a7cb871c536 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Received event network-vif-deleted-e31f87a2-d500-40b5-a928-2217787b6c1a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.716655] env[69982]: INFO nova.compute.manager [req-75f00871-19f9-46de-b13e-0aec794553fa req-d42aea7e-5d82-41c5-9a72-7a7cb871c536 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Neutron deleted interface e31f87a2-d500-40b5-a928-2217787b6c1a; detaching it from the instance and deleting it from the info cache [ 999.716922] env[69982]: DEBUG nova.network.neutron [req-75f00871-19f9-46de-b13e-0aec794553fa req-d42aea7e-5d82-41c5-9a72-7a7cb871c536 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.725894] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 999.742517] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84fdf503-7c14-428d-b405-bd0e031953d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.760747] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a07a4a-c790-4eea-8bb4-858fea221e45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.765189] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865054, 'name': PowerOffVM_Task, 'duration_secs': 0.45373} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.765802] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.772129] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfiguring VM instance instance-00000043 to detach disk 2002 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 999.773029] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39e574a9-52fa-49f8-bd99-da78777d2c0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.820380] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c185a800-61f2-4eef-b7c5-a555457c05d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.826876] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 999.826876] env[69982]: value = "task-3865058" [ 999.826876] env[69982]: _type = "Task" [ 999.826876] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.834815] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109156f5-3a13-4e0a-a9b3-3130392bc7eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.843305] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865058, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.855028] env[69982]: DEBUG nova.compute.provider_tree [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.876809] env[69982]: DEBUG nova.compute.manager [req-f3ffe857-876c-428c-be22-48fe64bc90d3 req-64528596-c611-4f3c-9f53-8a24316a0cc8 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Received event network-vif-deleted-9b2905f3-5b4e-48cf-afd1-247e80a04280 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.877113] env[69982]: INFO nova.compute.manager [req-f3ffe857-876c-428c-be22-48fe64bc90d3 req-64528596-c611-4f3c-9f53-8a24316a0cc8 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Neutron deleted interface 9b2905f3-5b4e-48cf-afd1-247e80a04280; detaching it from the instance and deleting it from the info cache [ 999.877378] env[69982]: DEBUG nova.network.neutron [req-f3ffe857-876c-428c-be22-48fe64bc90d3 req-64528596-c611-4f3c-9f53-8a24316a0cc8 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.914090] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Successfully created port: ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 999.983103] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865053, 'name': ReconfigVM_Task, 'duration_secs': 0.713287} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.983410] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.984060] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fac3aebe-8656-4ab1-aa55-5dd774b6e8d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.990874] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 999.990874] env[69982]: value = "task-3865059" [ 999.990874] env[69982]: _type = "Task" [ 999.990874] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.002336] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865055, 'name': PowerOffVM_Task, 'duration_secs': 0.278786} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.005649] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1000.005834] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1000.006118] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865059, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.006335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25d52e95-e1d7-435e-8af8-cdc407fe68a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.073647] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.074207] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.074378] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore1] ab14774e-c834-41e9-bb3f-87722b51070e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.074714] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65b74ada-c459-4e6f-9185-7c3538ca11ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.082499] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1000.082499] env[69982]: value = "task-3865061" [ 1000.082499] env[69982]: _type = "Task" [ 1000.082499] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.092247] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.166704] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.166704] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522aad64-3fb5-6422-8421-e0f7365155b0" [ 1000.166704] env[69982]: _type = "HttpNfcLease" [ 1000.166704] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1000.167020] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1000.167020] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522aad64-3fb5-6422-8421-e0f7365155b0" [ 1000.167020] env[69982]: _type = "HttpNfcLease" [ 1000.167020] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1000.167830] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d66ba70-2d38-411c-bb41-9932168764d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.176271] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1000.176538] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk for reading. 
{{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1000.230972] env[69982]: DEBUG nova.network.neutron [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.236169] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3771e9a1-2692-4a81-8207-bb33cd2b94eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.249495] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef457ce-6a69-4c7f-be14-6d8172d5e208 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.271901] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3eea9f14-11eb-4f53-9ed7-74ebc4ca0bec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.289156] env[69982]: DEBUG nova.compute.manager [req-75f00871-19f9-46de-b13e-0aec794553fa req-d42aea7e-5d82-41c5-9a72-7a7cb871c536 service nova] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Detach interface failed, port_id=e31f87a2-d500-40b5-a928-2217787b6c1a, reason: Instance 48dbc665-8286-4d5d-af4e-1a85d1742952 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1000.325188] env[69982]: DEBUG nova.network.neutron [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.348732] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865058, 'name': ReconfigVM_Task, 'duration_secs': 0.267077} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.349024] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfigured VM instance instance-00000043 to detach disk 2002 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1000.349218] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.349482] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fc2e5bf-1fc8-4f7f-8a49-c4114ea04250 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.356832] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1000.356832] env[69982]: value = "task-3865062" [ 1000.356832] env[69982]: _type = "Task" [ 1000.356832] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.357840] env[69982]: DEBUG nova.scheduler.client.report [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1000.373752] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865062, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.381503] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-61acbf52-adbf-4f30-b28a-3309e5384606 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.391469] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42c81ec-7aa3-4b7d-ae5c-ff24215a7b73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.429495] env[69982]: DEBUG nova.compute.manager [req-f3ffe857-876c-428c-be22-48fe64bc90d3 req-64528596-c611-4f3c-9f53-8a24316a0cc8 service nova] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Detach interface failed, port_id=9b2905f3-5b4e-48cf-afd1-247e80a04280, reason: Instance 9a1de08e-3206-44cc-8d34-a5527faf9684 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1000.503669] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865059, 'name': Rename_Task, 'duration_secs': 0.153556} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.504219] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1000.504459] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d2b27964-a7f3-45c7-be4b-d8a09b8d1215 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.510821] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1000.510821] env[69982]: value = "task-3865063" [ 1000.510821] env[69982]: _type = "Task" [ 1000.510821] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.519992] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.596041] env[69982]: DEBUG oslo_vmware.api [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152536} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.596672] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.596997] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.597338] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.597643] env[69982]: INFO nova.compute.manager [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1000.598058] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.598388] env[69982]: DEBUG nova.compute.manager [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1000.598594] env[69982]: DEBUG nova.network.neutron [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1000.733701] env[69982]: INFO nova.compute.manager [-] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Took 1.58 seconds to deallocate network for instance. [ 1000.741711] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1000.789105] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1000.789619] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1000.789892] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1000.790584] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1000.790584] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1000.790584] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1000.790795] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1000.790960] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1000.791325] env[69982]: DEBUG 
nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1000.791597] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1000.791801] env[69982]: DEBUG nova.virt.hardware [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1000.792963] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eecb891-7ad3-43e2-a6db-5636e5cfca49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.805219] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b83102-ab8a-4a1e-bb74-601ed3e45c5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.832101] env[69982]: INFO nova.compute.manager [-] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Took 1.60 seconds to deallocate network for instance. [ 1000.872225] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.163s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.874902] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865062, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.875758] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.859s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.876113] env[69982]: DEBUG nova.objects.instance [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lazy-loading 'resources' on Instance uuid ff2c680a-211a-44ad-b00d-1037f1fcb856 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1000.976579] env[69982]: INFO nova.scheduler.client.report [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted allocations for instance 5b79fc38-ace3-4f94-8d1c-b77912f44a1d [ 1001.029274] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865063, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.251471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.340226] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.368890] env[69982]: DEBUG oslo_vmware.api [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865062, 'name': PowerOnVM_Task, 'duration_secs': 0.708814} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.368890] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.368890] env[69982]: DEBUG nova.compute.manager [None req-dd846bef-2792-4353-b367-31cae9a7f027 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.369474] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38e137f-9565-482e-ad93-11b73a575632 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.486416] env[69982]: DEBUG oslo_concurrency.lockutils [None req-be4a9d9a-41b0-450b-8f17-928b7a3712a7 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "5b79fc38-ace3-4f94-8d1c-b77912f44a1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.394s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.525259] env[69982]: DEBUG oslo_vmware.api [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865063, 'name': PowerOnVM_Task, 'duration_secs': 0.606149} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.525259] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1001.525259] env[69982]: INFO nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Took 6.39 seconds to spawn the instance on the hypervisor. 
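[editorial sketch] The entries above follow the oslo.vmware task pattern that recurs throughout this log: a vSphere task such as PowerOnVM_Task or ReconfigVM_Task is invoked, wait_for_task / _poll_task logs intermediate progress ("progress is 0%", "progress is 66%", ...), and a final entry reports the task as completed successfully together with its duration_secs. The following is a minimal, self-contained Python sketch of that polling loop under stated assumptions; get_task_info, TaskState values and poll_interval are hypothetical stand-ins for illustration, not the actual oslo.vmware API.

    import time

    # Hypothetical task states mirroring the vSphere task lifecycle seen in the log.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it finishes, logging progress the way _poll_task does.

        get_task_info is a hypothetical callable returning (state, progress, result).
        """
        start = time.monotonic()
        while True:
            state, progress, result = get_task_info()
            if state == RUNNING:
                print(f"Task progress is {progress}%.")   # cf. "PowerOnVM_Task} progress is 66%."
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if state == SUCCESS:
                # cf. "'name': 'PowerOnVM_Task', 'duration_secs': 0.708814} completed successfully."
                print(f"Task completed successfully in {duration:.3f}s.")
                return result
            raise RuntimeError(f"Task failed after {duration:.3f}s: {result}")

In the log, the same loop is what produces the alternating "Waiting for the task" / "progress is N%" / "completed successfully" entries for each task id (task-3865062, task-3865063, ...).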
[ 1001.525259] env[69982]: DEBUG nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1001.525259] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0aee6e1-8da3-4d4b-b26a-4e9d0c5f98dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.546499] env[69982]: DEBUG nova.network.neutron [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.642245] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Successfully updated port: ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1001.749132] env[69982]: DEBUG nova.compute.manager [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Received event network-vif-plugged-ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.749132] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Acquiring lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.749132] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.749132] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.749132] env[69982]: DEBUG nova.compute.manager [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] No waiting events found dispatching network-vif-plugged-ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1001.749305] env[69982]: WARNING nova.compute.manager [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Received unexpected event network-vif-plugged-ad71a014-b2c7-40d6-8a8c-53a75e51870f for instance with vm_state building and task_state spawning. 
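[editorial sketch] The event-handling entries above (Received event network-vif-plugged-..., acquiring and releasing the "<uuid>-events" lock, "No waiting events found dispatching ...", followed by the WARNING about an unexpected event while the instance is still building) reflect nova-compute's external instance event dispatch: a waiter registered for the event is popped under a per-instance lock, and if no waiter exists the event is logged as unexpected. The sketch below illustrates that pop-or-warn pattern under stated assumptions; prepare_for_event, pop_instance_event and the in-memory _pending map are illustrative stand-ins, not Nova's actual InstanceEvents implementation.

    import threading

    # Minimal stand-in for per-instance event bookkeeping, guarded by a lock
    # (cf. the Acquiring/acquired/released entries for the "<uuid>-events" lock).
    _events_lock = threading.Lock()
    _pending = {}   # {instance_uuid: {event_name: callback}}

    def prepare_for_event(instance_uuid, event_name, callback):
        """Register a waiter before starting the operation that emits the event."""
        with _events_lock:
            _pending.setdefault(instance_uuid, {})[event_name] = callback

    def pop_instance_event(instance_uuid, event_name):
        """Dispatch an external event to its waiter, or report it as unexpected."""
        with _events_lock:
            callback = _pending.get(instance_uuid, {}).pop(event_name, None)
        if callback is None:
            # cf. "No waiting events found dispatching network-vif-plugged-..." and the
            # WARNING "Received unexpected event ... for instance with vm_state building".
            print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
            return
        callback(event_name)

Here no waiter was registered for network-vif-plugged on instance 475a403c-bfdb-4239-b0d4-3baca441603f because the spawn path had not yet begun waiting on it, which is why the event is merely logged as unexpected rather than treated as an error.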
[ 1001.749405] env[69982]: DEBUG nova.compute.manager [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Received event network-changed-ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1001.749553] env[69982]: DEBUG nova.compute.manager [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Refreshing instance network info cache due to event network-changed-ad71a014-b2c7-40d6-8a8c-53a75e51870f. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1001.752119] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Acquiring lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.752353] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Acquired lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.752548] env[69982]: DEBUG nova.network.neutron [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Refreshing network info cache for port ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1001.856317] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1001.856566] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1001.857865] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e436cebf-2c6e-4cf8-8b90-270cfe993428 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.880545] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4254bf18-8324-4b7f-919c-6a224a88eeba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.913645] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.916683] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66724e99-15da-4b10-b77a-46791fd89b3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.939906] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1001.939906] env[69982]: value = "task-3865064" [ 1001.939906] env[69982]: _type = "Task" [ 1001.939906] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.946193] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6d2031-e22a-45ba-b69f-e996797d53e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.960281] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62638fe-feb4-42a4-945d-ba483805f7fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.962694] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865064, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.998367] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebfb730-a0cd-4d4d-80ff-ae8d0df180cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.007567] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf31d7d-c2d9-4fff-aff4-8cafdfa82e40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.023817] env[69982]: DEBUG nova.compute.provider_tree [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.056994] env[69982]: INFO nova.compute.manager [-] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Took 1.46 seconds to deallocate network for instance. [ 1002.061554] env[69982]: INFO nova.compute.manager [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Took 40.41 seconds to build instance. [ 1002.147489] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.297398] env[69982]: DEBUG nova.compute.manager [req-5728b2c3-481c-43e1-baa7-08cf24e34665 req-5a636d3a-35cd-4f8f-b2c3-fb0a6f26cb62 service nova] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Received event network-vif-deleted-5664c82e-ad37-48c8-b71c-799c16942ae8 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.398947] env[69982]: DEBUG nova.network.neutron [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1002.456114] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865064, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.529815] env[69982]: DEBUG nova.scheduler.client.report [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1002.567522] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f49c9bd0-0498-4165-ba71-0d443181d5a2 tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.923s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.567522] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.587644] env[69982]: DEBUG nova.network.neutron [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.951845] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865064, 'name': ReconfigVM_Task, 'duration_secs': 0.595034} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.952909] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to attach disk [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.957119] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-542ecbaf-5c09-4d9b-ab7d-66a86686bcd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.974130] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1002.974130] env[69982]: value = "task-3865065" [ 1002.974130] env[69982]: _type = "Task" [ 1002.974130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.983386] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865065, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.036344] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.039095] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.774s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.039377] env[69982]: DEBUG nova.objects.instance [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lazy-loading 'resources' on Instance uuid 43a8e7dd-843b-49f6-9edb-60c2b380e9c2 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.060277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "187cbba3-3700-4c40-a514-28e08ea13426" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.060657] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.090050] env[69982]: DEBUG oslo_concurrency.lockutils [req-69fe6549-52eb-4c84-95ed-33e5e23c524f req-032ae83b-bc62-41ca-b50e-f7081a0a7250 service nova] Releasing lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.090454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.090720] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1003.102976] env[69982]: INFO nova.scheduler.client.report [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Deleted allocations for instance ff2c680a-211a-44ad-b00d-1037f1fcb856 [ 1003.485572] env[69982]: DEBUG oslo_vmware.api [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865065, 'name': ReconfigVM_Task, 'duration_secs': 0.173899} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.485885] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1003.565440] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.572246] env[69982]: INFO nova.compute.manager [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Rebuilding instance [ 1003.612310] env[69982]: DEBUG oslo_concurrency.lockutils [None req-61cd6b03-efe6-411e-9693-88ea4ef7744d tempest-ServersWithSpecificFlavorTestJSON-2143707285 tempest-ServersWithSpecificFlavorTestJSON-2143707285-project-member] Lock "ff2c680a-211a-44ad-b00d-1037f1fcb856" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.885s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.641659] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1003.653700] env[69982]: DEBUG nova.compute.manager [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.654807] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ad3f68-01c6-4739-b0f3-d1f521f39c4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.872036] env[69982]: DEBUG nova.network.neutron [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Updating instance_info_cache with network_info: [{"id": "ad71a014-b2c7-40d6-8a8c-53a75e51870f", "address": "fa:16:3e:eb:68:d4", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad71a014-b2", "ovs_interfaceid": "ad71a014-b2c7-40d6-8a8c-53a75e51870f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.014029] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7306eff1-1ba0-4a77-83a5-71d7dd5426a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.024744] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf187dfc-f4c1-4ad6-b90e-9cdbeeb2c049 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.069149] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6484139-a80c-4765-ab81-387dcc24a4e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.083133] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d032640-ae5d-439c-a8a2-501c8406af4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.096575] env[69982]: DEBUG nova.compute.provider_tree [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.098911] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.374236] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "refresh_cache-475a403c-bfdb-4239-b0d4-3baca441603f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.374576] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance network_info: |[{"id": "ad71a014-b2c7-40d6-8a8c-53a75e51870f", "address": "fa:16:3e:eb:68:d4", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapad71a014-b2", "ovs_interfaceid": "ad71a014-b2c7-40d6-8a8c-53a75e51870f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1004.375018] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:68:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ad71a014-b2c7-40d6-8a8c-53a75e51870f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1004.383033] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1004.383033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1004.383241] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67708a76-482f-441b-9c26-644c207ded0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.405791] env[69982]: DEBUG nova.compute.manager [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.406054] env[69982]: DEBUG nova.compute.manager [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing instance network info cache due to event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1004.406283] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.406425] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.406582] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1004.411308] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1004.411308] env[69982]: value = "task-3865066" [ 1004.411308] env[69982]: _type = "Task" [ 1004.411308] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.420911] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865066, 'name': CreateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.600867] env[69982]: DEBUG nova.scheduler.client.report [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.673416] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.673974] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1205ffc-165b-4947-8be2-c9d6a3527af9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.683757] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1004.683757] env[69982]: value = "task-3865067" [ 1004.683757] env[69982]: _type = "Task" [ 1004.683757] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.695241] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865067, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.755631] env[69982]: DEBUG nova.objects.instance [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'flavor' on Instance uuid 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1004.929981] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865066, 'name': CreateVM_Task, 'duration_secs': 0.490866} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.929981] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.929981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.929981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.929981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.933648] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c195ba7-d847-456b-b089-db7a5a161a31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.939610] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1004.939610] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52442f0a-e86f-ab1f-063f-8d16ba4cc55f" [ 1004.939610] env[69982]: _type = "Task" [ 1004.939610] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.948808] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52442f0a-e86f-ab1f-063f-8d16ba4cc55f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.106795] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.109425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.102s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.109684] env[69982]: DEBUG nova.objects.instance [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'resources' on Instance uuid 5100234f-ea02-40bf-b883-fa9a159c7637 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.175340] env[69982]: INFO nova.scheduler.client.report [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Deleted allocations for instance 43a8e7dd-843b-49f6-9edb-60c2b380e9c2 [ 1005.197673] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865067, 'name': PowerOffVM_Task, 'duration_secs': 0.202017} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.202352] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.204109] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.205842] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8592e8a9-a0b5-48f7-9848-a35cb660a91e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.216755] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.217191] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf667427-b9d9-454c-99a3-0a8e97398550 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.252781] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.253245] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.253245] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Deleting the datastore file [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.254244] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updated VIF entry in instance network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1005.254582] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.255960] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3c58df1-4b85-46d1-9fef-3e12007452ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.261414] env[69982]: DEBUG oslo_concurrency.lockutils [None req-00bbb8b3-f8d0-4c96-81a7-077b98124720 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.041s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.276693] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1005.276693] env[69982]: value = "task-3865069" [ 1005.276693] env[69982]: _type = "Task" [ 1005.276693] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.279466] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.451421] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52442f0a-e86f-ab1f-063f-8d16ba4cc55f, 'name': SearchDatastore_Task, 'duration_secs': 0.015495} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.452020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.452614] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.453219] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.453406] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.453695] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.454119] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e71d13c7-de67-4bc9-ace0-d6b6bfd7cd83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.468825] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.471340] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.471340] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b26f85e8-4ada-4135-9fbe-03195fafb66c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.478243] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1005.478243] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52616d14-ceb6-7261-df56-3036b9360f35" [ 1005.478243] env[69982]: _type = "Task" [ 1005.478243] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.487689] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52616d14-ceb6-7261-df56-3036b9360f35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.686059] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e79f873b-74c8-4721-b5b2-3b94b52293bc tempest-ListImageFiltersTestJSON-1969169780 tempest-ListImageFiltersTestJSON-1969169780-project-member] Lock "43a8e7dd-843b-49f6-9edb-60c2b380e9c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.505s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.759914] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.759914] env[69982]: DEBUG nova.compute.manager [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1005.759914] env[69982]: DEBUG nova.compute.manager [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing instance network info cache due to event network-changed-4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1005.760149] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Acquiring lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.760282] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Acquired lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.761443] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Refreshing network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1005.784788] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146624} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.788017] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1005.788017] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1005.788017] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1005.992475] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52616d14-ceb6-7261-df56-3036b9360f35, 'name': SearchDatastore_Task, 'duration_secs': 0.013426} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.996511] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a8cf79b-db97-472b-823f-bdde7464beaf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.002810] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1006.002810] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c90fb4-f4a0-4cee-51c7-4f966a380022" [ 1006.002810] env[69982]: _type = "Task" [ 1006.002810] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.015273] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c90fb4-f4a0-4cee-51c7-4f966a380022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.182312] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3682f91-2721-452c-ae13-d92151199249 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.190968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edd4e3e-8bf3-46dd-8027-91beb88ff313 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.224253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e55a912-813e-416f-bc2c-f998fe35dadb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.233114] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab3a929-72f4-46ea-927b-8098130683c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.249869] env[69982]: DEBUG nova.compute.provider_tree [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.427438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.427438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" acquired by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.514907] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c90fb4-f4a0-4cee-51c7-4f966a380022, 'name': SearchDatastore_Task, 'duration_secs': 0.021396} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.515207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.515461] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 475a403c-bfdb-4239-b0d4-3baca441603f/475a403c-bfdb-4239-b0d4-3baca441603f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1006.515782] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13caf032-804f-4840-8527-db8f446e2163 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.524140] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1006.524140] env[69982]: value = "task-3865070" [ 1006.524140] env[69982]: _type = "Task" [ 1006.524140] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.536058] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.630091] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updated VIF entry in instance network info cache for port 4e8435e5-b58d-4662-bc23-35592b41251f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1006.630532] env[69982]: DEBUG nova.network.neutron [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [{"id": "4e8435e5-b58d-4662-bc23-35592b41251f", "address": "fa:16:3e:a2:09:a8", "network": {"id": "5d731e80-09cf-406e-b635-af147b3ee15e", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-893888564-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5102c0ae7ade4db1a99486f7632dbe3e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39cd75b0-9ec7-48ed-b57f-34da0c573a60", "external-id": "nsx-vlan-transportzone-751", "segmentation_id": 751, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e8435e5-b5", "ovs_interfaceid": "4e8435e5-b58d-4662-bc23-35592b41251f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.752895] env[69982]: DEBUG nova.scheduler.client.report [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.847408] env[69982]: INFO nova.compute.manager [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Rebuilding instance [ 1006.855647] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.856031] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.856203] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.856382] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.856529] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.856696] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.856933] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.857191] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.857273] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.857434] env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.857607] 
env[69982]: DEBUG nova.virt.hardware [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.858636] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03d9724-35f1-45b9-9265-984f269740ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.868732] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452cac15-e3f5-4c9b-b570-651df16550cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.888857] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.894512] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.894692] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.894861] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a507173-9fb9-4416-ab34-f1e5b1f785f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.913481] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.913481] env[69982]: value = "task-3865071" [ 1006.913481] env[69982]: _type = "Task" [ 1006.913481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.925147] env[69982]: DEBUG nova.compute.manager [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1006.926093] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c31124-bd46-45b7-9281-370b6db14dd3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.933520] env[69982]: DEBUG nova.compute.utils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1006.934976] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865071, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.778769] env[69982]: DEBUG oslo_concurrency.lockutils [req-0175e169-5c4e-487c-89c5-bbb946f67886 req-d1218596-8df7-4ea6-ab62-c08f8544ecff service nova] Releasing lock "refresh_cache-331f218a-ad6b-4417-b56d-83113e0c92cb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.779618] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.670s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.782167] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.355s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.788898] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.852s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.790397] env[69982]: INFO nova.compute.claims [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.804939] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865070, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.806322} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.808380] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 475a403c-bfdb-4239-b0d4-3baca441603f/475a403c-bfdb-4239-b0d4-3baca441603f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1007.808820] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1007.809371] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865071, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.809600] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76bbba20-b8d5-44f8-91aa-c418ad22b04b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.818582] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1007.818582] env[69982]: value = "task-3865072" [ 1007.818582] env[69982]: _type = "Task" [ 1007.818582] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.832622] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865072, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.863700] env[69982]: INFO nova.scheduler.client.report [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance 5100234f-ea02-40bf-b883-fa9a159c7637 [ 1008.286470] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.287190] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcf81b7c-d607-4bad-8cb1-86c2bfd972dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.295895] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865071, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.300055] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1008.300055] env[69982]: value = "task-3865073" [ 1008.300055] env[69982]: _type = "Task" [ 1008.300055] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.309594] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.333757] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865072, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082634} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.333757] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1008.333757] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa06a7eb-5097-440b-a0b7-8f368c9743b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.358381] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 475a403c-bfdb-4239-b0d4-3baca441603f/475a403c-bfdb-4239-b0d4-3baca441603f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1008.358760] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-648ff1ab-1d37-4aa6-b69e-3546f23ee6d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.376667] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb09923b-4404-47e2-b935-bf41236f41f3 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "5100234f-ea02-40bf-b883-fa9a159c7637" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.542s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.382822] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1008.382822] env[69982]: value = "task-3865074" [ 1008.382822] env[69982]: _type = "Task" [ 1008.382822] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.393081] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865074, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.694916] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.695326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.695644] env[69982]: INFO nova.compute.manager [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Attaching volume d0cf9a07-1efc-4787-be9e-c02fbe499c1d to /dev/sdb [ 1008.765610] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c42574-846a-4b7b-8e3e-3b1e1bc26b27 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.779996] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf317d3-caa2-4549-94ad-23a24095182b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.806099] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865071, 'name': CreateVM_Task, 'duration_secs': 1.663681} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.812842] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1008.813876] env[69982]: DEBUG nova.virt.block_device [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updating existing volume attachment record: af62b0fc-76e2-4ac3-a39e-e40f8b9d1c10 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1008.816257] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.816418] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.816725] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1008.817394] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31bc4f90-c790-4ac6-896b-28a14c4ca85e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.823788] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865073, 'name': PowerOffVM_Task, 'duration_secs': 0.422595} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.825349] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.829578] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1008.829578] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e85ba4-4996-70e9-111b-772741f08033" [ 1008.829578] env[69982]: _type = "Task" [ 1008.829578] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.844452] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e85ba4-4996-70e9-111b-772741f08033, 'name': SearchDatastore_Task, 'duration_secs': 0.012085} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.844756] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.844984] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.845236] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.845411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.845600] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1008.845874] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd9a06b4-46cc-421b-a866-4b00253b1f50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.856090] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1008.856287] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1008.860024] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59bc6e4e-10ff-4011-b474-9bbf599d437a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.864479] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1008.864479] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6fbf0-2ec5-de73-50e8-474927e0d9e9" [ 1008.864479] env[69982]: _type = "Task" [ 1008.864479] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.881685] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6fbf0-2ec5-de73-50e8-474927e0d9e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.896711] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865074, 'name': ReconfigVM_Task, 'duration_secs': 0.328005} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.897319] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 475a403c-bfdb-4239-b0d4-3baca441603f/475a403c-bfdb-4239-b0d4-3baca441603f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.898018] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38834fe6-f554-4c84-9fb9-bffa1a572d8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.904900] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1008.904900] env[69982]: value = "task-3865075" [ 1008.904900] env[69982]: _type = "Task" [ 1008.904900] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.914337] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865075, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.937477] env[69982]: INFO nova.compute.manager [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Detaching volume 44d0e079-95f9-4407-b06b-9c0c5646922a [ 1008.995636] env[69982]: INFO nova.virt.block_device [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Attempting to driver detach volume 44d0e079-95f9-4407-b06b-9c0c5646922a from mountpoint /dev/sdb [ 1008.995636] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1008.995755] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1008.997293] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f84920-3211-4270-a892-71a8d54e66b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.024914] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68297eb-b92d-4295-8616-f87f12ef14d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.035333] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ccee76-f34a-4add-8836-6ead8adde393 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.064524] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15624982-d0c9-43b6-92e6-db93a70f65e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.088050] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] The volume has not been displaced from its original location: [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1009.095376] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1009.097836] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-675cc338-5a12-4d13-a534-28b8e637dcbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.120955] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1009.120955] env[69982]: value = "task-3865079" [ 1009.120955] env[69982]: _type = "Task" [ 1009.120955] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.131922] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865079, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.378994] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6fbf0-2ec5-de73-50e8-474927e0d9e9, 'name': SearchDatastore_Task, 'duration_secs': 0.012449} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.380446] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-772430cc-fd99-45cc-ba74-aa5da1953252 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.390882] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1009.390882] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52401fd8-335c-7af6-6348-dbc2dae7f281" [ 1009.390882] env[69982]: _type = "Task" [ 1009.390882] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.415458] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52401fd8-335c-7af6-6348-dbc2dae7f281, 'name': SearchDatastore_Task, 'duration_secs': 0.013191} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.416800] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.417088] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.417523] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ccbf104-fc8b-4f4f-9df7-83598c80daed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.425762] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865075, 'name': Rename_Task, 'duration_secs': 0.167353} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.430422] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1009.431229] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-daf8a96f-8b22-40ee-8bb6-025cc168f74b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.437953] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1009.437953] env[69982]: value = "task-3865080" [ 1009.437953] env[69982]: _type = "Task" [ 1009.437953] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.445719] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1009.445719] env[69982]: value = "task-3865081" [ 1009.445719] env[69982]: _type = "Task" [ 1009.445719] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.462085] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865080, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.470712] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865081, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.485722] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e5b550-47da-4fa1-858d-d1356d4d8733 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.492370] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc675135-546d-4ec6-bb60-673ffbeed723 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.524878] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890acf50-d470-42f5-a591-ddb185a85f3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.535146] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16258e00-186c-4a6f-9a49-9d76bef65925 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.551120] env[69982]: DEBUG nova.compute.provider_tree [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.630325] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865079, 'name': ReconfigVM_Task, 'duration_secs': 0.323246} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.630824] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1009.636691] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ba8d5f0-6867-44f2-a71d-aa9bc27a46cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.654781] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1009.654781] env[69982]: value = "task-3865082" [ 1009.654781] env[69982]: _type = "Task" [ 1009.654781] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.664834] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865082, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.689456] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1009.689456] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3081dbd-21bc-471f-9f1f-96ab27555638 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.695290] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1009.695579] env[69982]: ERROR oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk due to incomplete transfer. 
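The rw_handles records above trace the NFC lease release at the end of a stream-optimized VMDK read: the lease still reports state "ready", but the reader did not mark the transfer complete, so the lease is aborted rather than completed before the HTTP read handle is closed. A minimal, self-contained Python sketch of that decision follows; the Lease class and its methods are hypothetical stand-ins for the vSphere HttpNfcLease operations, not the oslo.vmware implementation.

# Illustrative sketch only; Lease and its methods are hypothetical stand-ins.
class Lease:
    def __init__(self, state):
        self.state = state  # e.g. "ready", "done", "error"

    def complete(self):
        print("HttpNfcLeaseComplete invoked")

    def abort(self):
        print("HttpNfcLeaseAbort invoked")


def release_read_lease(lease, transfer_complete):
    """Release an NFC lease after a VMDK read, mirroring the records above."""
    if lease.state == "ready" and transfer_complete:
        lease.complete()
    else:
        # "Aborting lease ... due to incomplete transfer."
        lease.abort()


release_read_lease(Lease("ready"), transfer_complete=False)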
[ 1009.695766] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cf66b249-3abf-4373-94b6-6cca21c34598 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.709457] env[69982]: DEBUG oslo_vmware.rw_handles [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5277303c-4941-3f15-9a60-ae3a5eaef796/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1009.709794] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Uploaded image 8f3be3bb-91de-4918-95d8-14e1107e6dd5 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1009.715580] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1009.715580] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-91dd56b4-9491-4f7f-865f-79202309abd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.722398] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1009.722398] env[69982]: value = "task-3865083" [ 1009.722398] env[69982]: _type = "Task" [ 1009.722398] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.735484] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865083, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.951762] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865080, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.965681] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865081, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.054683] env[69982]: DEBUG nova.scheduler.client.report [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.166482] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865082, 'name': ReconfigVM_Task, 'duration_secs': 0.211809} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.166814] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1010.233173] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865083, 'name': Destroy_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.449809] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865080, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758243} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.450200] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.450429] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.450772] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-afdb9a34-5233-48e3-b392-718569ff4eb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.464180] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1010.464180] env[69982]: value = "task-3865084" [ 1010.464180] env[69982]: _type = "Task" [ 1010.464180] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.464429] env[69982]: DEBUG oslo_vmware.api [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865081, 'name': PowerOnVM_Task, 'duration_secs': 0.619654} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.464761] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.464955] env[69982]: INFO nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Took 9.72 seconds to spawn the instance on the hypervisor. 
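The repeated "Task: {'id': task-38650xx, ...} progress is N%" records above come from polling vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task, PowerOnVM_Task, ...) until they report success or error. Below is a minimal sketch of such a polling loop, assuming a hypothetical get_task_info callable and TaskInfo shape; it is not the oslo.vmware wait_for_task API.

# Illustrative task-polling sketch; get_task_info and TaskInfo are assumptions.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str             # "running", "success", or "error"
    progress: int          # percent complete
    error: str | None = None


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a task until it finishes, logging progress like the records above."""
    while True:
        info = get_task_info(task_id)
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task: {task_id} progress is {info.progress}%")
        time.sleep(poll_interval)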
[ 1010.465149] env[69982]: DEBUG nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.468752] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd09099-1a21-4ac7-bfe1-940cdb241e6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.559931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.771s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.560481] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1010.563848] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.082s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.564097] env[69982]: DEBUG nova.objects.instance [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid 56ad2f3f-c24f-446e-8df7-09fde60ba6cd {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.735655] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865083, 'name': Destroy_Task, 'duration_secs': 0.600516} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.735959] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroyed the VM [ 1010.737067] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1010.737251] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-76aa94bb-3c53-4953-98f5-15d108cc0192 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.745051] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1010.745051] env[69982]: value = "task-3865085" [ 1010.745051] env[69982]: _type = "Task" [ 1010.745051] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.757454] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865085, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.978591] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865084, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09076} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.978988] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.979819] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c6bb58-4808-420c-81c5-c9c2e9f361cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.006365] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.006932] env[69982]: INFO nova.compute.manager [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Took 38.82 seconds to build instance. [ 1011.008047] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de18a743-ec18-4644-8fe6-e9ab9327ed9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.031305] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1011.031305] env[69982]: value = "task-3865086" [ 1011.031305] env[69982]: _type = "Task" [ 1011.031305] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.042406] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865086, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.057979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.058346] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.068250] env[69982]: DEBUG nova.compute.utils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.076140] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.076539] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.236425] env[69982]: DEBUG nova.policy [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbcdd79646354cceb351f24631c88f3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9649a2ef36434bc3bb6e6f7a908f1354', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.256833] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865085, 'name': RemoveSnapshot_Task} progress is 65%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.262938] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1011.263561] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b57d70cb-bdec-4204-a1fc-588df18cf038 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.276292] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1011.276292] env[69982]: value = "task-3865088" [ 1011.276292] env[69982]: _type = "Task" [ 1011.276292] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.285662] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865088, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.526507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fcc2baea-b651-4591-96e4-03375ff97e54 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.346s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.542562] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865086, 'name': ReconfigVM_Task, 'duration_secs': 0.443567} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.542938] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4/719e6e11-ac2f-4b71-9a55-5c110f8aaba4.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.547871] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2cfc54b0-14da-4709-a42a-6a6dfc36b1e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.557193] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1011.557193] env[69982]: value = "task-3865089" [ 1011.557193] env[69982]: _type = "Task" [ 1011.557193] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.561058] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1011.575546] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865089, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.576124] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1011.587527] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cce0a8-1bec-47ca-ba40-1a8506b7fb87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.596071] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437d57c8-ac0e-4b8c-8664-e2b286ba997b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.631136] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97110b8-906a-42d4-8711-34f829d3ca44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.639347] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af27754-7cc0-49bd-9bfb-86e80575fc4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.654694] env[69982]: DEBUG nova.compute.provider_tree [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1011.755508] env[69982]: DEBUG oslo_vmware.api [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865085, 'name': RemoveSnapshot_Task, 'duration_secs': 0.655386} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.755780] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1011.756090] env[69982]: INFO nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 17.30 seconds to snapshot the instance on the hypervisor. [ 1011.787310] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1011.787587] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1011.787823] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1011.788754] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b71f4ef-0a1d-4370-ad26-df0f216bd869 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.810843] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47cfea3-3bec-4d7c-808a-bea5badb7477 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.822698] env[69982]: WARNING nova.virt.vmwareapi.driver [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1011.822698] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1011.822698] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb14690-99b7-4a5a-93ea-b7e597e414f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.829274] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1011.829618] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2e51d6a-cd74-4ccc-8fb3-9fa1e28be39f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.896735] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.896735] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.896735] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.896735] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-401371f5-8f00-4ebf-aa4e-94ba862508de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.904916] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1011.904916] env[69982]: value = "task-3865091" [ 1011.904916] env[69982]: _type = "Task" [ 1011.904916] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.911819] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865091, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.067757] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865089, 'name': Rename_Task, 'duration_secs': 0.309383} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.068083] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.068311] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9976193a-5df9-43e9-807e-bce09ff61a83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.081456] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1012.081456] env[69982]: value = "task-3865092" [ 1012.081456] env[69982]: _type = "Task" [ 1012.081456] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.094575] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865092, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.101973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.158702] env[69982]: DEBUG nova.scheduler.client.report [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.188787] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "475a403c-bfdb-4239-b0d4-3baca441603f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.189150] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.189474] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.189779] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.190052] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.197021] env[69982]: INFO nova.compute.manager [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Terminating instance [ 1012.256060] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Successfully created port: 6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.384095] env[69982]: DEBUG nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Found 3 images (rotation: 2) {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1012.384383] env[69982]: DEBUG nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Rotating out 1 backups {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1012.384598] env[69982]: DEBUG nova.compute.manager [None req-ec20054e-1998-4874-81c4-7d7f60ecae92 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleting image e6d8946f-e6b4-47f7-a539-ec50ec4ec3aa {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1012.414287] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221137} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.414550] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1012.414733] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1012.414904] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1012.592524] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1012.594686] env[69982]: DEBUG oslo_vmware.api [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865092, 'name': PowerOnVM_Task, 'duration_secs': 0.509601} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.599313] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.599637] env[69982]: DEBUG nova.compute.manager [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.600534] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0fca22-40b6-4deb-ae7b-abbb19046480 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.625987] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1012.625987] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.625987] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1012.626152] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.626152] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1012.626152] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 
tempest-ImagesNegativeTestJSON-1648597849-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1012.626152] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1012.626152] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1012.626312] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1012.626312] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1012.626312] env[69982]: DEBUG nova.virt.hardware [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1012.626312] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a639ebae-1b92-44af-9a30-9a9805c03ac8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.634772] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd45bbe6-8d83-489b-b097-f349046119d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.667949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.104s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.671350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.987s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.672911] env[69982]: INFO nova.compute.claims [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 
tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1012.702053] env[69982]: DEBUG nova.compute.manager [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1012.702163] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1012.703203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62fd31a-a4ef-43fb-b0e5-394fdd577c49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.718954] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.719107] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-000f6330-e141-448b-bebc-ceba99658042 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.727386] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1012.727386] env[69982]: value = "task-3865093" [ 1012.727386] env[69982]: _type = "Task" [ 1012.727386] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.737123] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865093, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.778940] env[69982]: INFO nova.scheduler.client.report [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance 56ad2f3f-c24f-446e-8df7-09fde60ba6cd [ 1012.920454] env[69982]: INFO nova.virt.block_device [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Booting with volume 44d0e079-95f9-4407-b06b-9c0c5646922a at /dev/sdb [ 1012.985022] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-10103c70-cff8-4b59-9903-56bd635718cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.996791] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a028bb-69dc-4080-880b-303c604d6aaf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.042734] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dba777ea-8a4c-4efc-b7da-029993eb6022 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.053616] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c6f067-69a9-4b21-88cf-dc60fc880ad4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.092199] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b15e77-dcc8-41f2-85f1-7e23283e0772 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.099650] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b0b96c-90f8-4626-bbc2-76dbd869f697 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.119629] env[69982]: DEBUG nova.virt.block_device [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating existing volume attachment record: 26994fd9-191a-425b-b71c-e924cc6ae672 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1013.125746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.238640] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865093, 'name': PowerOffVM_Task, 'duration_secs': 0.191838} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.238853] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1013.239019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1013.239293] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d28948d-d061-4162-8820-84e058a00ba3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.289273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b434370-c2d5-47c8-9787-d937c9e228b5 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "56ad2f3f-c24f-446e-8df7-09fde60ba6cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.342s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.303711] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1013.305932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1013.305932] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleting the datastore file [datastore2] 475a403c-bfdb-4239-b0d4-3baca441603f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1013.305932] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb95577e-2ca2-4940-95e9-57ab509ac365 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.313624] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1013.313624] env[69982]: value = "task-3865095" [ 1013.313624] env[69982]: _type = "Task" [ 1013.313624] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.325812] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.386935] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Volume attach. Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1013.387355] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768034', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'name': 'volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6d390a12-bfb4-4d91-9e83-a81560a08e1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'serial': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1013.388449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b88509-8d7a-4250-8e12-59ac8d890e3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.409923] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8766ca91-5a57-4295-8b24-810410920a70 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.442532] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d/volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1013.443583] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0b858ee-e00a-4afc-b199-fc81fa47359d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.467872] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1013.467872] env[69982]: value = "task-3865096" [ 1013.467872] env[69982]: _type = "Task" [ 1013.467872] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.482261] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865096, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.827293] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865095, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.983440] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865096, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.080616] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.080885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.081139] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.081372] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.081893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.083926] 
env[69982]: INFO nova.compute.manager [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Terminating instance [ 1014.237206] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08cb321e-670f-45cc-a440-776c356e5260 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.248866] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6186819e-2b39-41cf-8733-60c3335d9850 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.286513] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5f8cfa3-47fe-42c2-b090-097012a1e43b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.296605] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7d4a0c-9450-409e-bafa-67ec214579e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.313403] env[69982]: DEBUG nova.compute.provider_tree [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.331021] env[69982]: DEBUG oslo_vmware.api [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.633538} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.331021] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.331021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.331021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.331021] env[69982]: INFO nova.compute.manager [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Took 1.63 seconds to destroy the instance on the hypervisor. 
[ 1014.331455] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.331455] env[69982]: DEBUG nova.compute.manager [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1014.331455] env[69982]: DEBUG nova.network.neutron [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1014.483036] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865096, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.580378] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Successfully updated port: 6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.589799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "refresh_cache-719e6e11-ac2f-4b71-9a55-5c110f8aaba4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.589799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquired lock "refresh_cache-719e6e11-ac2f-4b71-9a55-5c110f8aaba4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.589927] env[69982]: DEBUG nova.network.neutron [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1014.820513] env[69982]: DEBUG nova.scheduler.client.report [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} 
[ 1014.915595] env[69982]: DEBUG nova.compute.manager [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Received event network-vif-plugged-6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.915595] env[69982]: DEBUG oslo_concurrency.lockutils [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] Acquiring lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1014.915896] env[69982]: DEBUG oslo_concurrency.lockutils [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.915896] env[69982]: DEBUG oslo_concurrency.lockutils [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.917072] env[69982]: DEBUG nova.compute.manager [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] No waiting events found dispatching network-vif-plugged-6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1014.917072] env[69982]: WARNING nova.compute.manager [req-ca3782b5-a9ed-4961-94f6-79423c39de45 req-15fe21b7-33da-4c1f-bcdc-9d3e06179ee5 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Received unexpected event network-vif-plugged-6722e358-b16e-4178-86f7-7f3fc7636466 for instance with vm_state building and task_state spawning. [ 1015.000141] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865096, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.085772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.088181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquired lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.088181] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1015.122270] env[69982]: DEBUG nova.network.neutron [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1015.135680] env[69982]: DEBUG nova.compute.manager [req-96186f32-48e8-4edc-b69d-367e25dfa9ec req-566208f0-7356-43bd-8151-68792b314284 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Received event network-vif-deleted-ad71a014-b2c7-40d6-8a8c-53a75e51870f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.136610] env[69982]: INFO nova.compute.manager [req-96186f32-48e8-4edc-b69d-367e25dfa9ec req-566208f0-7356-43bd-8151-68792b314284 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Neutron deleted interface ad71a014-b2c7-40d6-8a8c-53a75e51870f; detaching it from the instance and deleting it from the info cache [ 1015.136610] env[69982]: DEBUG nova.network.neutron [req-96186f32-48e8-4edc-b69d-367e25dfa9ec req-566208f0-7356-43bd-8151-68792b314284 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.258868] env[69982]: DEBUG nova.network.neutron [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.326873] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.655s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.327878] env[69982]: DEBUG nova.compute.manager [None 
req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1015.332511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.449s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.332511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.335775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.036s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.335775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.335775] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1015.335963] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.892s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.337506] env[69982]: DEBUG nova.objects.instance [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lazy-loading 'resources' on Instance uuid 8358b105-7276-4292-804d-534f9fb1535e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.342070] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1015.342070] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1015.342070] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1015.342432] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1015.342432] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1015.342432] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1015.342432] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1015.342432] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1015.342739] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1015.342739] env[69982]: DEBUG 
nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1015.342841] env[69982]: DEBUG nova.virt.hardware [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1015.344390] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bebe5ed6-5609-4c2d-88c6-9a19251f8c7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.352033] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3d7799-f322-4de1-b475-f5338df455a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.363689] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76cf0c1-c1db-48bc-9626-e2235045a421 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.372612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b462905-2fd6-4ac1-a459-ee10155029cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.392044] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:fc:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18a2c6c8-1313-42eb-a058-40e272e7fda3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.400506] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1015.410439] env[69982]: INFO nova.scheduler.client.report [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocations for instance 9b733e1e-0532-4d91-a460-6b1f1971f388 [ 1015.411934] env[69982]: DEBUG nova.network.neutron [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.413159] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1015.417366] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-57a0b089-b194-4b70-a383-eadac6c52b66 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.433228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ad47a2-e0e5-4c50-bfa9-cf28e17527eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.447226] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1015.447226] env[69982]: value = "task-3865097" [ 1015.447226] env[69982]: _type = "Task" [ 1015.447226] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.448037] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b35d02-d574-4fd4-bfb0-deb6bd33f2af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.461840] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865097, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.489124] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177297MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1015.489294] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.499742] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865096, 'name': ReconfigVM_Task, 'duration_secs': 1.914121} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.500087] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfigured VM instance instance-0000004b to attach disk [datastore2] volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d/volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.505791] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52db16c1-95d6-4c53-9ebf-df6eb6bc2c44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.531691] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1015.531691] env[69982]: value = "task-3865098" [ 1015.531691] env[69982]: _type = "Task" [ 1015.531691] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.546295] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865098, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.629514] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1015.639349] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b34578f-924c-4760-875f-e5557a648763 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.652742] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a555335-9a3c-4914-990c-e19e608ae741 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.701828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.702378] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.704458] env[69982]: DEBUG nova.compute.manager [req-96186f32-48e8-4edc-b69d-367e25dfa9ec req-566208f0-7356-43bd-8151-68792b314284 service nova] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Detach interface failed, port_id=ad71a014-b2c7-40d6-8a8c-53a75e51870f, reason: Instance 475a403c-bfdb-4239-b0d4-3baca441603f could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1015.762253] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Releasing lock "refresh_cache-719e6e11-ac2f-4b71-9a55-5c110f8aaba4" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.763031] env[69982]: DEBUG nova.compute.manager [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1015.763031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.764997] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14db698-8e0a-4c79-a47a-7893e75a01e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.776867] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.780306] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56bde17d-939a-47da-85b9-2f996df2fbf5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.789290] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1015.789290] env[69982]: value = "task-3865099" [ 1015.789290] env[69982]: _type = "Task" [ 1015.789290] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.800551] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865099, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.846208] env[69982]: DEBUG nova.compute.utils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1015.849192] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1015.849192] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1015.913131] env[69982]: DEBUG nova.policy [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99858c5fbda7454cab0188cf368e51f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83b53a0998874810b5302415624592cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1015.916079] env[69982]: DEBUG nova.network.neutron [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Updating instance_info_cache with network_info: [{"id": "6722e358-b16e-4178-86f7-7f3fc7636466", "address": "fa:16:3e:4a:f6:33", "network": {"id": "d13ca4aa-924d-4807-afea-aed147e39bdd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-68239168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9649a2ef36434bc3bb6e6f7a908f1354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6722e358-b1", "ovs_interfaceid": "6722e358-b16e-4178-86f7-7f3fc7636466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.944493] env[69982]: INFO nova.compute.manager [-] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Took 1.61 seconds to deallocate network for instance. [ 1015.946557] env[69982]: DEBUG oslo_concurrency.lockutils [None req-491ced3a-c3ca-4b87-881a-b6711db14859 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "9b733e1e-0532-4d91-a460-6b1f1971f388" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.075s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.964823] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865097, 'name': CreateVM_Task, 'duration_secs': 0.405864} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.964823] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1015.964823] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.964823] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.965089] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1015.968483] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3f2edba-5b45-4fba-9146-cbc42834ca73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.972711] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1015.972711] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c2d90-4541-11db-bfb4-f31160826077" [ 1015.972711] env[69982]: _type = "Task" [ 1015.972711] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.988301] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c2d90-4541-11db-bfb4-f31160826077, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.050419] env[69982]: DEBUG oslo_vmware.api [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865098, 'name': ReconfigVM_Task, 'duration_secs': 0.254376} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.050419] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768034', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'name': 'volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6d390a12-bfb4-4d91-9e83-a81560a08e1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'serial': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1016.205952] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1016.308694] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865099, 'name': PowerOffVM_Task, 'duration_secs': 0.285063} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.310078] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.310078] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.310078] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7559e0f4-6f59-4b11-8ea4-182bbd6bb510 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.342056] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.342591] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.342591] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Deleting the datastore file [datastore2] 719e6e11-ac2f-4b71-9a55-5c110f8aaba4 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.343082] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05a6e6e2-9cc0-48c6-8088-64be9a4e8dea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.351974] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1016.357333] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for the task: (returnval){ [ 1016.357333] env[69982]: value = "task-3865101" [ 1016.357333] env[69982]: _type = "Task" [ 1016.357333] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.368381] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865101, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.423520] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Releasing lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.423520] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Instance network_info: |[{"id": "6722e358-b16e-4178-86f7-7f3fc7636466", "address": "fa:16:3e:4a:f6:33", "network": {"id": "d13ca4aa-924d-4807-afea-aed147e39bdd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-68239168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9649a2ef36434bc3bb6e6f7a908f1354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6722e358-b1", "ovs_interfaceid": "6722e358-b16e-4178-86f7-7f3fc7636466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1016.425077] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:f6:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b8b5b5e2-866d-4ab5-b74d-4a47de0c4877', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6722e358-b16e-4178-86f7-7f3fc7636466', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.433561] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Creating folder: Project (9649a2ef36434bc3bb6e6f7a908f1354). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.433813] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7e227dd-6f34-42a4-8623-376d41415af2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.441851] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8a1af7-1bee-4e40-9a7d-42aaef74f72d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.451717] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb11fd7-3503-4bba-a407-4dca08fe72e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.459326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.459689] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Created folder: Project (9649a2ef36434bc3bb6e6f7a908f1354) in parent group-v767796. [ 1016.459822] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Creating folder: Instances. Parent ref: group-v768036. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1016.460512] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09245d52-71ef-424c-96ff-95803cb6483f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.493972] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cc3b8c-aa63-4432-83b5-d1a03bc565c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.496786] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Created folder: Instances in parent group-v768036. [ 1016.497072] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.497303] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.499338] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f0aa4fb-0470-47b9-9ed6-4b3b09bcdea4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.520167] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c2d90-4541-11db-bfb4-f31160826077, 'name': SearchDatastore_Task, 'duration_secs': 0.018033} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.520702] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4af163-2e2a-4f92-b6ca-1f446efc9598 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.525129] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.525405] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1016.525653] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.525804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.526015] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.527861] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3b5f244-aed2-48c6-84c9-0b7bb82749bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.530149] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.530149] env[69982]: value = "task-3865104" [ 1016.530149] env[69982]: _type = "Task" [ 1016.530149] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.550019] env[69982]: DEBUG nova.compute.provider_tree [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.552860] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865104, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.560383] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.560674] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1016.561458] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21a6843b-f447-4b37-a6bd-23b05ab5df65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.568175] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1016.568175] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c8073-efd1-79d0-5b74-3f619c1753d8" [ 1016.568175] env[69982]: _type = "Task" [ 1016.568175] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.579132] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c8073-efd1-79d0-5b74-3f619c1753d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.695804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.696262] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.720328] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Successfully created port: 35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1016.736566] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.877592] env[69982]: DEBUG oslo_vmware.api [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Task: {'id': task-3865101, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410697} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.877856] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1016.878274] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1016.878576] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1016.878775] env[69982]: INFO nova.compute.manager [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1016.879141] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.879402] env[69982]: DEBUG nova.compute.manager [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1016.879548] env[69982]: DEBUG nova.network.neutron [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1016.904797] env[69982]: DEBUG nova.network.neutron [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1017.044704] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865104, 'name': CreateVM_Task, 'duration_secs': 0.352961} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.044878] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.045794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.045968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.046336] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.046632] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dc5a9c0-f55e-483e-add1-b82900a04367 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.052917] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1017.052917] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296db36-1c8f-2632-cdd4-db4a4b561b49" [ 1017.052917] env[69982]: _type = "Task" [ 1017.052917] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.057155] env[69982]: DEBUG nova.scheduler.client.report [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.063360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.063360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.063360] env[69982]: DEBUG nova.compute.manager [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.063360] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74b98b4-7526-4b12-b7cc-ce75c81979fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.069320] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296db36-1c8f-2632-cdd4-db4a4b561b49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.074474] env[69982]: DEBUG nova.compute.manager [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1017.075163] env[69982]: DEBUG nova.objects.instance [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.087117] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c8073-efd1-79d0-5b74-3f619c1753d8, 'name': SearchDatastore_Task, 'duration_secs': 0.018496} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.088085] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ae0f29f-3f88-4e0c-9abf-078ee25f0755 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.096806] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1017.096806] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d0d7d3-2ed8-6275-e75e-0b8fb17b28f0" [ 1017.096806] env[69982]: _type = "Task" [ 1017.096806] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.112192] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d0d7d3-2ed8-6275-e75e-0b8fb17b28f0, 'name': SearchDatastore_Task, 'duration_secs': 0.009747} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.113902] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.113902] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1017.115302] env[69982]: DEBUG nova.compute.manager [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Received event network-changed-6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1017.115504] env[69982]: DEBUG nova.compute.manager [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Refreshing instance network info cache due to event network-changed-6722e358-b16e-4178-86f7-7f3fc7636466. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1017.115716] env[69982]: DEBUG oslo_concurrency.lockutils [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] Acquiring lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.115862] env[69982]: DEBUG oslo_concurrency.lockutils [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] Acquired lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.116029] env[69982]: DEBUG nova.network.neutron [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Refreshing network info cache for port 6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1017.117721] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38b94f38-7d4a-4427-a153-f996e9b73730 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.128038] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1017.128038] env[69982]: value = "task-3865105" [ 1017.128038] env[69982]: _type = "Task" [ 1017.128038] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.142560] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865105, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.151153] env[69982]: DEBUG nova.objects.instance [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid 6d390a12-bfb4-4d91-9e83-a81560a08e1a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.202019] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1017.370139] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1017.407875] env[69982]: DEBUG nova.network.neutron [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.435729] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1017.435729] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1017.435946] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1017.435946] 
env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1017.436116] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1017.436265] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1017.436476] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1017.436661] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1017.436869] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1017.437052] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1017.437228] env[69982]: DEBUG nova.virt.hardware [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1017.438512] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7400c265-569b-4c58-9bbe-8bb714356b87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.451675] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db21c988-61ad-490b-8793-9c07a0101834 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.564021] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5296db36-1c8f-2632-cdd4-db4a4b561b49, 'name': SearchDatastore_Task, 'duration_secs': 0.019449} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.564380] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.564630] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.564871] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.565033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.565253] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.565532] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e433964-b3a4-46e4-961d-5d7f89c53e21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.568132] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.570403] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.990s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.571664] env[69982]: DEBUG nova.objects.instance [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe 
tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lazy-loading 'resources' on Instance uuid bba6f430-5af5-4d8a-9cf4-082207c170a5 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.586077] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.586077] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.586077] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44decc32-977b-41d1-ba03-7339d4fea665 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.595225] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1017.595225] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526aac23-101a-df8d-302f-c6d9962283d9" [ 1017.595225] env[69982]: _type = "Task" [ 1017.595225] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.612700] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526aac23-101a-df8d-302f-c6d9962283d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009176} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.613575] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da91824-6fc9-4105-86be-67882cfd0cea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.623954] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1017.623954] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521c648a-228d-cf57-f7c5-053959c4c49b" [ 1017.623954] env[69982]: _type = "Task" [ 1017.623954] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.637158] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521c648a-228d-cf57-f7c5-053959c4c49b, 'name': SearchDatastore_Task, 'duration_secs': 0.009385} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.638168] env[69982]: INFO nova.scheduler.client.report [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted allocations for instance 8358b105-7276-4292-804d-534f9fb1535e [ 1017.639609] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.639882] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3edfba43-55e9-4180-bb03-ce008af3a7d7/3edfba43-55e9-4180-bb03-ce008af3a7d7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1017.642583] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9651e5c-3b2d-4aaf-aa92-0f4f1a9a22fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.650677] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482301} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.650677] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1017.650677] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1017.650677] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e2eefc48-719a-4d6b-8724-7ac7bf57f22d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.654150] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1017.654150] env[69982]: value = "task-3865106" [ 1017.654150] env[69982]: _type = "Task" [ 1017.654150] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.660870] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1017.660870] env[69982]: value = "task-3865107" [ 1017.660870] env[69982]: _type = "Task" [ 1017.660870] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.661632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d2c726df-54f4-4f82-8eb9-f2840f04a006 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.966s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.673705] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.678848] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865107, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.734967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.917036] env[69982]: INFO nova.compute.manager [-] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Took 1.03 seconds to deallocate network for instance. [ 1017.983958] env[69982]: DEBUG nova.network.neutron [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Updated VIF entry in instance network info cache for port 6722e358-b16e-4178-86f7-7f3fc7636466. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1017.983958] env[69982]: DEBUG nova.network.neutron [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Updating instance_info_cache with network_info: [{"id": "6722e358-b16e-4178-86f7-7f3fc7636466", "address": "fa:16:3e:4a:f6:33", "network": {"id": "d13ca4aa-924d-4807-afea-aed147e39bdd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-68239168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9649a2ef36434bc3bb6e6f7a908f1354", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b8b5b5e2-866d-4ab5-b74d-4a47de0c4877", "external-id": "nsx-vlan-transportzone-74", "segmentation_id": 74, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6722e358-b1", "ovs_interfaceid": "6722e358-b16e-4178-86f7-7f3fc7636466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.086168] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.086168] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22945186-b9fa-476e-a37a-2fc1ebaa20b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.094065] env[69982]: DEBUG oslo_vmware.api [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1018.094065] env[69982]: value = "task-3865108" [ 1018.094065] env[69982]: _type = "Task" [ 1018.094065] env[69982]: } to 
complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.105333] env[69982]: DEBUG oslo_vmware.api [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.151754] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f87b1c2a-779d-43dc-af47-33a8260a3f29 tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "8358b105-7276-4292-804d-534f9fb1535e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.103s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.171787] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865106, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.185798] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068189} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.186252] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1018.187111] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55940d4b-13d9-4b88-b7be-efbc30ccfe7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.212727] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.216230] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed1a3955-72fc-4d12-bc40-02951c522c2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.237976] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1018.237976] env[69982]: value = "task-3865109" [ 1018.237976] env[69982]: _type = "Task" [ 1018.237976] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.248710] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865109, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.418643] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.487590] env[69982]: DEBUG oslo_concurrency.lockutils [req-d49b5cf8-1f1b-4305-a97d-3c5efa3492ef req-38266ef4-7314-4463-abae-bf9fefe7ecde service nova] Releasing lock "refresh_cache-3edfba43-55e9-4180-bb03-ce008af3a7d7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.545014] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3e0c7a-831b-4367-9377-412c9925088c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.553268] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f94dc3-3697-4d55-a003-54c3b8f8b2b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.592023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b0de9e-572f-4a34-807b-02573c0d4769 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.602524] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31980242-88e7-4034-b49a-d4f4fb330ba9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.611911] env[69982]: DEBUG oslo_vmware.api [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865108, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.621696] env[69982]: DEBUG nova.compute.provider_tree [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.666456] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865106, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.748672] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865109, 'name': ReconfigVM_Task, 'duration_secs': 0.319769} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.748842] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82/6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.750243] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'device_name': '/dev/sda', 'encryption_format': None, 'size': 0, 'guest_format': None, 'encrypted': False, 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'disk_bus': None, 'image_id': 'a4e69d6f-1c15-4f57-92a8-5e81c6be8172'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'}, 'guest_format': None, 'attachment_id': '26994fd9-191a-425b-b71c-e924cc6ae672', 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69982) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1018.750562] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1018.750823] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1018.751730] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a7a873-d35f-41a6-9d97-38f52b75700c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.769799] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b8f294-5a6d-41a2-a0d2-ccc0a7e7c9a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.797412] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1018.797835] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-069f0034-8565-4438-bb19-89b102fd334e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.819151] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1018.819151] env[69982]: value = "task-3865110" [ 1018.819151] env[69982]: _type = "Task" [ 1018.819151] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.832048] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865110, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.923226] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Successfully updated port: 35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1019.107735] env[69982]: DEBUG oslo_vmware.api [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865108, 'name': PowerOffVM_Task, 'duration_secs': 0.734573} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.108398] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.108398] env[69982]: DEBUG nova.compute.manager [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1019.110291] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadce205-52b8-4f72-b328-15915e56b283 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.128616] env[69982]: DEBUG nova.scheduler.client.report [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.167136] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865106, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.48589} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.167485] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 3edfba43-55e9-4180-bb03-ce008af3a7d7/3edfba43-55e9-4180-bb03-ce008af3a7d7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.167903] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.168043] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4f6bbb7-d20f-47f4-be71-ce0ecf9bfa39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.177116] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1019.177116] env[69982]: value = "task-3865111" [ 1019.177116] env[69982]: _type = "Task" [ 1019.177116] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.191218] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865111, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.202480] env[69982]: DEBUG nova.compute.manager [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Received event network-vif-plugged-35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.202678] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.202890] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.203079] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.203416] env[69982]: DEBUG nova.compute.manager [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] No waiting events found dispatching network-vif-plugged-35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1019.203547] env[69982]: WARNING nova.compute.manager [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Received unexpected event network-vif-plugged-35894e50-7421-402a-91f6-e5a640cebd85 for instance with vm_state building and task_state spawning. [ 1019.203704] env[69982]: DEBUG nova.compute.manager [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Received event network-changed-35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.203853] env[69982]: DEBUG nova.compute.manager [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Refreshing instance network info cache due to event network-changed-35894e50-7421-402a-91f6-e5a640cebd85. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1019.204080] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Acquiring lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.205175] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Acquired lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.205175] env[69982]: DEBUG nova.network.neutron [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Refreshing network info cache for port 35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1019.330142] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865110, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.425374] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.626860] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e67269d2-efca-4bbd-b3b0-3ce05aab3959 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.566s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.636060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.065s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.639028] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.387s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.639028] env[69982]: DEBUG nova.objects.instance [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lazy-loading 'resources' on Instance uuid 48dbc665-8286-4d5d-af4e-1a85d1742952 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.680858] env[69982]: INFO 
nova.scheduler.client.report [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Deleted allocations for instance bba6f430-5af5-4d8a-9cf4-082207c170a5 [ 1019.693254] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076744} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.693566] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.694499] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0351970-4215-4854-83e0-e75fa5d8405e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.730460] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 3edfba43-55e9-4180-bb03-ce008af3a7d7/3edfba43-55e9-4180-bb03-ce008af3a7d7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.731411] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdac9335-367e-4766-b0a5-052b66918dd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.753514] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1019.753514] env[69982]: value = "task-3865112" [ 1019.753514] env[69982]: _type = "Task" [ 1019.753514] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.763391] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865112, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.805687] env[69982]: DEBUG nova.network.neutron [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1019.830131] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865110, 'name': ReconfigVM_Task, 'duration_secs': 0.850065} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.830370] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to attach disk [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1019.835396] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a84e4b98-bb14-4c40-9fa5-145c72d36bfc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.851877] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1019.851877] env[69982]: value = "task-3865113" [ 1019.851877] env[69982]: _type = "Task" [ 1019.851877] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.861696] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865113, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.969172] env[69982]: DEBUG nova.network.neutron [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.192163] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8fe81bb3-abf0-4c59-b5fb-60556f618ebe tempest-ListServersNegativeTestJSON-1689300438 tempest-ListServersNegativeTestJSON-1689300438-project-member] Lock "bba6f430-5af5-4d8a-9cf4-082207c170a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.064s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.270016] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865112, 'name': ReconfigVM_Task, 'duration_secs': 0.345141} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.270016] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 3edfba43-55e9-4180-bb03-ce008af3a7d7/3edfba43-55e9-4180-bb03-ce008af3a7d7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.270016] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8705f60d-1957-4c33-a216-e86d06d6be4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.280027] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1020.280027] env[69982]: value = "task-3865114" [ 1020.280027] env[69982]: _type = "Task" [ 1020.280027] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.294536] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865114, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.365821] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865113, 'name': ReconfigVM_Task, 'duration_secs': 0.181969} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.370490] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1020.370490] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2671f85b-c5d7-40db-bdda-56563e1e9620 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.381304] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1020.381304] env[69982]: value = "task-3865115" [ 1020.381304] env[69982]: _type = "Task" [ 1020.381304] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.390567] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.415214] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.415473] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.474797] env[69982]: DEBUG oslo_concurrency.lockutils [req-3d73e827-be9e-4f96-a434-718a5e1cc2fa req-b7ae9aa2-6a80-4452-85cd-08324daa54bb service nova] Releasing lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.477012] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.477012] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1020.611557] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a566bf-86d1-47b9-a683-f3ee5ca7c616 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.621198] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a90d44d0-f2cf-43f9-a140-a55d15a319d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.659572] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dffc869-3705-4823-8b1b-59e651478395 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.669252] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa89ea4-fddb-4ae0-9cc5-1d7f5752b928 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.685262] env[69982]: DEBUG nova.compute.provider_tree [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.789514] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865114, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.888752] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.924945] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1021.021404] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1021.189712] env[69982]: DEBUG nova.scheduler.client.report [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.232948] env[69982]: DEBUG nova.network.neutron [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [{"id": "35894e50-7421-402a-91f6-e5a640cebd85", "address": "fa:16:3e:fe:6f:b0", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35894e50-74", 
"ovs_interfaceid": "35894e50-7421-402a-91f6-e5a640cebd85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.296252] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865114, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.395152] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.454338] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.696315] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.703569] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.362s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.703931] env[69982]: DEBUG nova.objects.instance [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lazy-loading 'resources' on Instance uuid 9a1de08e-3206-44cc-8d34-a5527faf9684 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1021.736942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.737516] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance network_info: |[{"id": "35894e50-7421-402a-91f6-e5a640cebd85", "address": "fa:16:3e:fe:6f:b0", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35894e50-74", "ovs_interfaceid": "35894e50-7421-402a-91f6-e5a640cebd85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1021.738334] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:6f:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '35894e50-7421-402a-91f6-e5a640cebd85', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1021.749738] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.752139] env[69982]: INFO nova.scheduler.client.report [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted allocations for instance 48dbc665-8286-4d5d-af4e-1a85d1742952 [ 1021.752238] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1021.752512] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0e12b9f-e89b-4da2-9ec4-58d61737a48d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.780675] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1021.780675] env[69982]: value = "task-3865116" [ 1021.780675] env[69982]: _type = "Task" [ 1021.780675] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.795238] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865116, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.802023] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865114, 'name': Rename_Task, 'duration_secs': 1.1647} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.802023] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.802023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8649da17-ff6a-4ed4-ad14-9cbdc4ea731b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.808692] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1021.808692] env[69982]: value = "task-3865117" [ 1021.808692] env[69982]: _type = "Task" [ 1021.808692] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.824673] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865117, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.895780] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.250470] env[69982]: DEBUG nova.compute.manager [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Stashing vm_state: stopped {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1022.278663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a49d7ef-84a7-4b72-8b21-8b098ce6af04 tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "48dbc665-8286-4d5d-af4e-1a85d1742952" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.767s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.297292] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865116, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.322827] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865117, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.398370] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.471336] env[69982]: INFO nova.compute.manager [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Rebuilding instance [ 1022.521215] env[69982]: DEBUG nova.compute.manager [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.522183] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873f24e3-0d34-493f-a3a8-0f2180ae8566 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.638912] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183c6f08-384d-40b2-ac81-d74cb6c2752d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.649182] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477c4e36-d05f-4ff7-8d47-7b2a6eb3f778 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.685106] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6e812d1-d025-4d32-b193-2ed4b85e330f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.693541] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25f5324-21a7-4068-8679-1c111c96a273 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.708784] env[69982]: DEBUG nova.compute.provider_tree [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.770878] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.792778] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865116, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.821511] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865117, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.893734] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865115, 'name': Rename_Task, 'duration_secs': 2.231776} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.894033] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1022.894296] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d5335c8-436d-4eda-baf8-854838d24e5f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.901485] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1022.901485] env[69982]: value = "task-3865118" [ 1022.901485] env[69982]: _type = "Task" [ 1022.901485] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.910049] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865118, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.133190] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "d2684194-a688-4466-9852-1f4ff656f057" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.133561] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.133839] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "d2684194-a688-4466-9852-1f4ff656f057-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.134061] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.134266] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.136727] env[69982]: INFO nova.compute.manager [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Terminating instance [ 1023.211806] env[69982]: DEBUG nova.scheduler.client.report [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.293979] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865116, 'name': CreateVM_Task, 'duration_secs': 1.39021} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.294188] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1023.295031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.295158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.295420] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1023.295691] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e29e333a-5527-4864-9950-f241cd266a57 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.301414] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1023.301414] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527948c1-77a5-f4d6-783c-28a8e136e9c2" [ 1023.301414] env[69982]: _type = "Task" [ 1023.301414] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.310734] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527948c1-77a5-f4d6-783c-28a8e136e9c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.321462] env[69982]: DEBUG oslo_vmware.api [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865117, 'name': PowerOnVM_Task, 'duration_secs': 1.223427} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.321561] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.321837] env[69982]: INFO nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Took 10.73 seconds to spawn the instance on the hypervisor. [ 1023.322031] env[69982]: DEBUG nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.323025] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de3a449-fce8-4044-8866-f413e717e778 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.414898] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865118, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.541990] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.542319] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8bbaf92-3108-4293-877c-9c488febb88a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.551717] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1023.551717] env[69982]: value = "task-3865119" [ 1023.551717] env[69982]: _type = "Task" [ 1023.551717] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.559821] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865119, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.647021] env[69982]: DEBUG nova.compute.manager [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1023.647021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1023.647021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7155856e-c96d-4bff-b37b-86ea65b89e28 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.657431] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.657814] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f59d224e-234c-4377-a3fa-d7321b5f8b53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.668608] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 1023.668608] env[69982]: value = "task-3865120" [ 1023.668608] env[69982]: _type = "Task" [ 1023.668608] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.680122] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.718901] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.016s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.724186] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.155s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.724186] env[69982]: DEBUG nova.objects.instance [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lazy-loading 'resources' on Instance uuid ab14774e-c834-41e9-bb3f-87722b51070e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.760241] env[69982]: INFO nova.scheduler.client.report [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Deleted allocations for instance 9a1de08e-3206-44cc-8d34-a5527faf9684 [ 1023.816873] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527948c1-77a5-f4d6-783c-28a8e136e9c2, 'name': SearchDatastore_Task, 'duration_secs': 0.01091} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.819275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.819275] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1023.819275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.819559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.819998] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1023.821328] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95446a10-1202-4a5b-9f5e-ffbdf155b81b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.832801] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1023.833061] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1023.833951] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe0c54c3-6ee1-4c97-96d4-d3c77a6b6e6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.849074] env[69982]: INFO nova.compute.manager [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Took 41.93 seconds to build instance. [ 1023.852136] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1023.852136] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9963a-beb7-88a9-291d-f47ad04ad7aa" [ 1023.852136] env[69982]: _type = "Task" [ 1023.852136] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.867023] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9963a-beb7-88a9-291d-f47ad04ad7aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.913927] env[69982]: DEBUG oslo_vmware.api [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865118, 'name': PowerOnVM_Task, 'duration_secs': 0.791098} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.914361] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1023.914557] env[69982]: DEBUG nova.compute.manager [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.915396] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1c0098-a48b-487f-97f4-a581e25f4278 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.060757] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865119, 'name': PowerOffVM_Task, 'duration_secs': 0.242352} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.061154] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.061477] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.063208] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873db253-f169-4cf4-a063-e2da117ba116 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.076766] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.077172] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee2d27db-bfe4-455c-8d24-63b5fabf1b0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.141751] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.145309] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.145629] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleting the datastore file [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.146415] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd203fb5-63ae-4f10-a185-fc01eb094325 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.156103] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1024.156103] env[69982]: value = "task-3865122" [ 1024.156103] env[69982]: _type = "Task" [ 1024.156103] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.165800] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865122, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.181897] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865120, 'name': PowerOffVM_Task, 'duration_secs': 0.30109} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.182279] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1024.183058] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.183398] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d89f3e92-ead4-4fba-a5c5-adc54d540d20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.256211] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.256489] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.256746] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore2] d2684194-a688-4466-9852-1f4ff656f057 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.259714] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad60cb9e-a3f5-4b47-bdaf-82c807d50868 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.269141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84cb800a-c27b-42c1-9e9f-7b20799ff5ea tempest-VolumesAdminNegativeTest-1489893392 tempest-VolumesAdminNegativeTest-1489893392-project-member] Lock "9a1de08e-3206-44cc-8d34-a5527faf9684" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
29.192s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.269600] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 1024.269600] env[69982]: value = "task-3865124" [ 1024.269600] env[69982]: _type = "Task" [ 1024.269600] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.279454] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.354592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-756d3cc4-a7ed-4a6b-a085-e348fdcf6f32 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.455s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.373257] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a9963a-beb7-88a9-291d-f47ad04ad7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.011477} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.378024] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-055b83e5-2016-4c70-8f11-f991f7ba7d02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.383883] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1024.383883] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d18667-76ca-fede-486d-5907c5b818fb" [ 1024.383883] env[69982]: _type = "Task" [ 1024.383883] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.393906] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d18667-76ca-fede-486d-5907c5b818fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.438710] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.572392] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.573414] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.573697] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.573999] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.574201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.577042] env[69982]: INFO nova.compute.manager [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Terminating instance [ 1024.671028] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166088} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.671028] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.671176] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.672019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.698684] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d85a2d-76e0-4801-9942-5443725ab1dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.708037] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904955ae-e231-49b2-b360-3b8731a21845 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.751646] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070adc04-247b-4169-994c-864a6eab9efa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.761223] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378faebd-6b3b-4c7d-8a5f-cec4ffdf69e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.784197] env[69982]: DEBUG nova.compute.provider_tree [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1024.791857] env[69982]: DEBUG oslo_vmware.api [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158582} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.792309] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.792531] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.792692] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.792858] env[69982]: INFO nova.compute.manager [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: d2684194-a688-4466-9852-1f4ff656f057] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1024.793132] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1024.793966] env[69982]: DEBUG nova.compute.manager [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1024.794087] env[69982]: DEBUG nova.network.neutron [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1024.897390] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d18667-76ca-fede-486d-5907c5b818fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010282} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.897875] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.898630] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1024.898961] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d37c1cc1-4009-48ae-90f4-030bb78b7862 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.908592] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1024.908592] env[69982]: value = "task-3865125" [ 1024.908592] env[69982]: _type = "Task" [ 1024.908592] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.918058] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865125, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.085717] env[69982]: DEBUG nova.compute.manager [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1025.086200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.087723] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f7060a-1400-4f25-8316-a89ed046fcec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.101201] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1025.101833] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d453d7d-33cd-40f9-8645-7ff25abfd0c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.112530] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1025.112530] env[69982]: value = "task-3865126" [ 1025.112530] env[69982]: _type = "Task" [ 1025.112530] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.127524] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865126, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.276940] env[69982]: DEBUG nova.compute.manager [req-27d6960a-6471-46c5-902b-cddaa6a8b98d req-1ad6df08-72ab-4f5b-9b45-5ed9e544e086 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Received event network-vif-deleted-3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1025.278227] env[69982]: INFO nova.compute.manager [req-27d6960a-6471-46c5-902b-cddaa6a8b98d req-1ad6df08-72ab-4f5b-9b45-5ed9e544e086 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Neutron deleted interface 3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71; detaching it from the instance and deleting it from the info cache [ 1025.278548] env[69982]: DEBUG nova.network.neutron [req-27d6960a-6471-46c5-902b-cddaa6a8b98d req-1ad6df08-72ab-4f5b-9b45-5ed9e544e086 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.287550] env[69982]: DEBUG nova.scheduler.client.report [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.424147] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865125, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.620856] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865126, 'name': PowerOffVM_Task, 'duration_secs': 0.285004} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.621138] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1025.621309] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.621650] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ac3f71db-2314-4b32-9b75-4d49b96d7c26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.695349] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1025.695349] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1025.695349] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Deleting the datastore file [datastore1] 3edfba43-55e9-4180-bb03-ce008af3a7d7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1025.695349] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bb3d5c7-1203-4860-830c-cd8aad957b47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.705596] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for the task: (returnval){ [ 1025.705596] env[69982]: value = "task-3865128" [ 1025.705596] env[69982]: _type = "Task" [ 1025.705596] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.715710] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865128, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.759553] env[69982]: DEBUG nova.network.neutron [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.785472] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a7ee3cc-e90c-48e1-b246-47b54874f990 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.796241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.074s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.799691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.700s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.803076] env[69982]: INFO nova.compute.claims [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.810637] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee8cee5-1fcb-447b-b6c9-7cd1ae0fa385 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.849678] env[69982]: INFO nova.scheduler.client.report [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted allocations for instance ab14774e-c834-41e9-bb3f-87722b51070e [ 1025.881021] env[69982]: DEBUG nova.compute.manager [req-27d6960a-6471-46c5-902b-cddaa6a8b98d req-1ad6df08-72ab-4f5b-9b45-5ed9e544e086 service nova] [instance: d2684194-a688-4466-9852-1f4ff656f057] Detach interface failed, port_id=3b50b2f7-54a6-4ee2-9d36-d71d26ba7a71, reason: Instance d2684194-a688-4466-9852-1f4ff656f057 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1025.896464] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1025.896737] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.897529] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1025.899957] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.900330] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1025.900884] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1025.901420] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1025.901420] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1025.901662] env[69982]: DEBUG nova.virt.hardware [None 
req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1025.901802] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1025.901945] env[69982]: DEBUG nova.virt.hardware [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1025.903298] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c7bd03-6b3c-465b-adcd-36dcdac9f48d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.916447] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e37c8e-5b00-4144-82f7-c157b02d9e33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.935920] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582133} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.939171] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:ee:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73909075-891e-4fc4-a912-c3757fcda156', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.945671] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.947049] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1025.947049] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1025.947049] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.947049] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20468c26-e82e-4a15-bfd3-7049031a4936 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.949378] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-927bbe87-b77c-4479-868b-9e608ceed072 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.971042] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1025.971042] env[69982]: value = "task-3865129" [ 1025.971042] env[69982]: _type = "Task" [ 1025.971042] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.972289] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.972289] env[69982]: value = "task-3865130" [ 1025.972289] env[69982]: _type = "Task" [ 1025.972289] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.984011] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.987992] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865130, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.217584] env[69982]: DEBUG oslo_vmware.api [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Task: {'id': task-3865128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155153} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.217851] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1026.218055] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1026.218249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1026.218427] env[69982]: INFO nova.compute.manager [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1026.218672] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.218884] env[69982]: DEBUG nova.compute.manager [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1026.219015] env[69982]: DEBUG nova.network.neutron [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1026.263733] env[69982]: INFO nova.compute.manager [-] [instance: d2684194-a688-4466-9852-1f4ff656f057] Took 1.47 seconds to deallocate network for instance. [ 1026.388639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e3098db8-dce6-467f-99ff-59f170faa6ce tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "ab14774e-c834-41e9-bb3f-87722b51070e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.456s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.496661] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07714} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.500863] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.501197] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865130, 'name': CreateVM_Task, 'duration_secs': 0.511121} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.501988] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b369b9e0-9473-4f8f-88b4-4a1204a344fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.504954] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.505562] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.505725] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.506047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.506737] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71924162-108e-4d3b-8ddd-8016d5222a39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.528975] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.530361] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c878d55-cd6d-4a81-a3e5-c2f723cf287b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.547209] env[69982]: DEBUG oslo_vmware.api 
[None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1026.547209] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528911c5-913a-ef8c-9021-a43ab2859c9d" [ 1026.547209] env[69982]: _type = "Task" [ 1026.547209] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.553197] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1026.553197] env[69982]: value = "task-3865131" [ 1026.553197] env[69982]: _type = "Task" [ 1026.553197] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.558359] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528911c5-913a-ef8c-9021-a43ab2859c9d, 'name': SearchDatastore_Task, 'duration_secs': 0.011778} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.562386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.562386] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1026.562386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.562386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.562540] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1026.565825] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-b3c55d0f-e723-4276-8f5f-04a4040bb16e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.575385] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.576075] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1026.576300] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1026.577073] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8ef11dd-c8de-4fac-b885-19ece4187ae1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.584453] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1026.584453] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be3e66-04e5-8140-5a51-74e9cb670350" [ 1026.584453] env[69982]: _type = "Task" [ 1026.584453] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.594879] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be3e66-04e5-8140-5a51-74e9cb670350, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.775382] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.075310] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865131, 'name': ReconfigVM_Task, 'duration_secs': 0.505456} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.076244] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfigured VM instance instance-00000054 to attach disk [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.077706] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-15d9727d-0339-4c0f-bdb2-379fcf5b730e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.092976] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1027.092976] env[69982]: value = "task-3865132" [ 1027.092976] env[69982]: _type = "Task" [ 1027.092976] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.101647] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52be3e66-04e5-8140-5a51-74e9cb670350, 'name': SearchDatastore_Task, 'duration_secs': 0.019279} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.106760] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8665f3b-644a-4d19-9552-6b003705771b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.116563] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865132, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.120088] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1027.120088] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52149aba-aa5f-a910-9aeb-2a355f43d1f3" [ 1027.120088] env[69982]: _type = "Task" [ 1027.120088] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.132838] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52149aba-aa5f-a910-9aeb-2a355f43d1f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.173228] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.173518] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.217533] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.217826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.369016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b57db9a-27f0-4b11-91c3-e4d7bd8aa3c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.376542] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3171005-e63c-41af-abce-fb83783ecbbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.410584] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8d0c5d-9e6e-48f8-8819-2fdcad2d46e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.420661] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0571afb8-c409-48e4-a6d4-095b087c9ea2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.435451] env[69982]: DEBUG nova.compute.provider_tree [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.511156] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.511500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.511698] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.511889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.512075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.517201] env[69982]: INFO nova.compute.manager [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Terminating instance [ 1027.582962] env[69982]: DEBUG nova.compute.manager [req-1ea03918-a730-4759-8d32-cc6aa6286ea5 req-0a0643e6-67d3-4fbe-a24d-d458d0054808 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Received event network-vif-deleted-6722e358-b16e-4178-86f7-7f3fc7636466 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.585084] env[69982]: INFO nova.compute.manager [req-1ea03918-a730-4759-8d32-cc6aa6286ea5 req-0a0643e6-67d3-4fbe-a24d-d458d0054808 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Neutron deleted interface 6722e358-b16e-4178-86f7-7f3fc7636466; detaching it from the instance and deleting it from the info cache [ 1027.585265] env[69982]: DEBUG nova.network.neutron [req-1ea03918-a730-4759-8d32-cc6aa6286ea5 req-0a0643e6-67d3-4fbe-a24d-d458d0054808 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.613270] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 
tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865132, 'name': Rename_Task, 'duration_secs': 0.231027} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.614115] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.614537] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46033265-7351-4d84-801e-c9cb2347ce55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.624808] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1027.624808] env[69982]: value = "task-3865133" [ 1027.624808] env[69982]: _type = "Task" [ 1027.624808] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.641145] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52149aba-aa5f-a910-9aeb-2a355f43d1f3, 'name': SearchDatastore_Task, 'duration_secs': 0.017046} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.644342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.644658] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.645402] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865133, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.645773] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef6be80b-3e90-4c00-933e-da1650d26d97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.657923] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1027.657923] env[69982]: value = "task-3865134" [ 1027.657923] env[69982]: _type = "Task" [ 1027.657923] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.665406] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865134, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.676399] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1027.721025] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1027.725783] env[69982]: DEBUG nova.network.neutron [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.940051] env[69982]: DEBUG nova.scheduler.client.report [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.023583] env[69982]: DEBUG nova.compute.manager [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1028.023583] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1028.023583] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b84519f-7db3-46b6-b635-73ee9ffb40ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.040022] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1028.040022] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d3416e2-2dd6-48fc-951a-8fe95a450c9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.049023] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1028.049023] env[69982]: value = "task-3865135" [ 1028.049023] env[69982]: _type = "Task" [ 1028.049023] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.062993] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865135, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.091408] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecf6287a-92c8-4f61-8a8e-fdb82a006e1c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.102882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eece1de-7aa3-444e-94f8-55c56d449515 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.155615] env[69982]: DEBUG nova.compute.manager [req-1ea03918-a730-4759-8d32-cc6aa6286ea5 req-0a0643e6-67d3-4fbe-a24d-d458d0054808 service nova] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Detach interface failed, port_id=6722e358-b16e-4178-86f7-7f3fc7636466, reason: Instance 3edfba43-55e9-4180-bb03-ce008af3a7d7 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1028.167856] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.172427] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865134, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.217690] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.230533] env[69982]: INFO nova.compute.manager [-] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Took 2.01 seconds to deallocate network for instance. [ 1028.244588] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.244854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.260590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.446471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.447027] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1028.450036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.348s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.451245] env[69982]: INFO nova.compute.claims [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1028.560541] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865135, 'name': PowerOffVM_Task, 'duration_secs': 0.32988} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.560995] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1028.561456] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1028.561857] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73722c35-1440-49c7-ad53-8cfca8013c98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.635917] env[69982]: DEBUG oslo_vmware.api [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865133, 'name': PowerOnVM_Task, 'duration_secs': 0.863646} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.636416] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1028.636762] env[69982]: INFO nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Took 11.27 seconds to spawn the instance on the hypervisor. 
[ 1028.637945] env[69982]: DEBUG nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1028.639112] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a44f266-c72a-44bf-b107-eefca68fe6e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.669644] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616934} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.670295] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.670845] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.671516] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9a3f63d-969f-4428-ad16-1df7f362bd2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.680857] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1028.680857] env[69982]: value = "task-3865137" [ 1028.680857] env[69982]: _type = "Task" [ 1028.680857] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.687462] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1028.691383] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1028.691383] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore2] 69103bad-cb3f-4cd1-bfa1-c19b10395674 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1028.691383] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38d2e67e-5e0e-47a9-89d5-0c4002d47d02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.696102] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.701189] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1028.701189] env[69982]: value = "task-3865138" [ 1028.701189] env[69982]: _type = "Task" [ 1028.701189] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.720219] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865138, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.738075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.750517] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1028.956795] env[69982]: DEBUG nova.compute.utils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.962011] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.962201] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1029.040931] env[69982]: DEBUG nova.policy [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '005bbd5e6a314bf48f443ddc050f0a75', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18dcc70df5e144e3b4f0592b0112aaf7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.160940] env[69982]: INFO nova.compute.manager [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Took 39.50 seconds to build instance. [ 1029.196023] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120783} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.196023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.196023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15fa6be-a6cd-4722-ad68-9d0ff6e3d1ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.227600] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.233471] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49d5bee7-49ec-4d67-a1fa-7ac4fda50e88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.259781] env[69982]: DEBUG oslo_vmware.api [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865138, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256595} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.267463] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1029.267463] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1029.267463] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1029.267463] env[69982]: INFO nova.compute.manager [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1029.267463] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.267463] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1029.267723] env[69982]: value = "task-3865139" [ 1029.267723] env[69982]: _type = "Task" [ 1029.267723] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.267834] env[69982]: DEBUG nova.compute.manager [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1029.268324] env[69982]: DEBUG nova.network.neutron [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1029.282593] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.288867] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.463074] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1029.602958] env[69982]: DEBUG nova.compute.manager [req-06a8f83b-db99-464c-a396-44cd4cda23b9 req-aec6b90c-1870-4f44-9396-ad7e3d692697 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Received event network-vif-deleted-b67025a3-947b-4ccc-8d88-60d00242778d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.603362] env[69982]: INFO nova.compute.manager [req-06a8f83b-db99-464c-a396-44cd4cda23b9 req-aec6b90c-1870-4f44-9396-ad7e3d692697 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Neutron deleted interface b67025a3-947b-4ccc-8d88-60d00242778d; detaching it from the instance and deleting it from the info cache [ 1029.603511] env[69982]: DEBUG nova.network.neutron [req-06a8f83b-db99-464c-a396-44cd4cda23b9 req-aec6b90c-1870-4f44-9396-ad7e3d692697 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.619925] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Successfully created port: a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.665930] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad2f730a-8127-42d6-bab3-840704fdaf04 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.785163] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865139, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.961934] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2be06e-655f-4355-92f5-838b01f2c156 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.975903] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeacb0d4-c162-4b70-a17a-eecb04859d0e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.014846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35b4f09-f3e0-4f0f-9e31-a194bee8477e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.026771] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60668876-e17e-4888-a49c-363b6862bad4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.044825] env[69982]: DEBUG nova.compute.provider_tree [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1030.072302] env[69982]: DEBUG nova.network.neutron [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.107575] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd96d652-8a49-449f-8c38-b62b990cb7b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.120587] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d829de68-735a-45f4-a107-985e74e914c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.159462] env[69982]: DEBUG nova.compute.manager [req-06a8f83b-db99-464c-a396-44cd4cda23b9 req-aec6b90c-1870-4f44-9396-ad7e3d692697 service nova] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Detach interface failed, port_id=b67025a3-947b-4ccc-8d88-60d00242778d, reason: Instance 69103bad-cb3f-4cd1-bfa1-c19b10395674 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1030.282562] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865139, 'name': ReconfigVM_Task, 'duration_secs': 1.001853} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.283200] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108/8518f3c8-738d-468a-9f57-de50e4e67108.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.283972] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3af220a9-203c-4a65-af52-c1252a99191a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.290955] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1030.290955] env[69982]: value = "task-3865140" [ 1030.290955] env[69982]: _type = "Task" [ 1030.290955] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.299189] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865140, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.482308] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1030.520826] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1030.521101] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.521253] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.521431] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.521571] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.521776] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1030.522572] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1030.522572] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1030.522572] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1030.522572] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1030.522932] env[69982]: DEBUG nova.virt.hardware [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1030.523619] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44b5bd1-2081-4982-9d65-544ce8197b24 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.532856] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5ffcfa-8b02-45a9-8be1-1303de5fbc41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.552200] env[69982]: DEBUG nova.scheduler.client.report [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.575583] env[69982]: INFO nova.compute.manager [-] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Took 1.31 seconds to deallocate network for instance. [ 1030.801681] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865140, 'name': Rename_Task, 'duration_secs': 0.304857} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.801983] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.802243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6916abde-90bc-4c1f-905b-b5a9a9741ec2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.813509] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1030.813509] env[69982]: value = "task-3865141" [ 1030.813509] env[69982]: _type = "Task" [ 1030.813509] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.826430] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.056766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.057379] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1031.063719] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.938s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.064669] env[69982]: DEBUG nova.objects.instance [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1031.085984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.334527] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865141, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.367744] env[69982]: DEBUG nova.compute.manager [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1031.568017] env[69982]: DEBUG nova.compute.utils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1031.568017] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1031.568017] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1031.642563] env[69982]: DEBUG nova.policy [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1031.657565] env[69982]: DEBUG nova.compute.manager [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Received event network-vif-plugged-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1031.657565] env[69982]: DEBUG oslo_concurrency.lockutils [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] Acquiring lock "187cbba3-3700-4c40-a514-28e08ea13426-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.657565] env[69982]: DEBUG oslo_concurrency.lockutils [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] Lock "187cbba3-3700-4c40-a514-28e08ea13426-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.658456] env[69982]: DEBUG oslo_concurrency.lockutils [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] Lock "187cbba3-3700-4c40-a514-28e08ea13426-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.658779] env[69982]: DEBUG nova.compute.manager [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] No waiting events found dispatching network-vif-plugged-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.659120] env[69982]: WARNING nova.compute.manager [req-edf4db81-7629-4012-837d-1129d933855d req-77393189-8397-48cc-8941-58487851b344 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Received unexpected event network-vif-plugged-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a for instance with vm_state building and task_state spawning. 
[ 1031.828712] env[69982]: DEBUG oslo_vmware.api [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865141, 'name': PowerOnVM_Task, 'duration_secs': 0.56493} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.829840] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.830205] env[69982]: DEBUG nova.compute.manager [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.832140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a883d58-cc8f-471f-8c14-b877222887af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.843491] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Successfully updated port: a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.902181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.074958] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1032.079449] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d9f2306b-d69a-4eae-9618-55258701712a tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.080708] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.591s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.260618] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Successfully created port: 8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1032.353083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.353326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.353551] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1032.361463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.902933] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1033.086456] env[69982]: DEBUG nova.network.neutron [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Updating instance_info_cache with network_info: [{"id": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "address": "fa:16:3e:19:c3:b7", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f896a3-40", "ovs_interfaceid": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.094839] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1033.105942] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating resource usage from migration ea7d255f-79eb-4118-bb96-acb700b60ec6 [ 1033.106141] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating resource usage from migration 0520604d-a406-402f-9686-aee9f2ea548e [ 1033.128213] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1033.128491] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1033.129994] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1033.129994] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1033.129994] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1033.129994] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1033.130464] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1033.130464] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1033.130789] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1033.130988] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1033.131186] env[69982]: DEBUG nova.virt.hardware [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1033.132509] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683ab9f7-fb7b-41bb-a8e8-9d233bf88f86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.136946] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.137105] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.137301] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.137826] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9123b08c-d2ec-4c4d-bade-0acdae75640a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.138823] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d2684194-a688-4466-9852-1f4ff656f057 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1033.138823] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance cd839916-6daf-4b31-941d-6305a585bfaa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.138823] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d43e5e7a-577d-4fe9-aff7-9012adfbdb9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.138823] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 69103bad-cb3f-4cd1-bfa1-c19b10395674 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1033.139142] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 331f218a-ad6b-4417-b56d-83113e0c92cb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139142] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139142] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6d390a12-bfb4-4d91-9e83-a81560a08e1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139142] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ad43c35a-69bc-4c84-8869-cfde6f516b9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139275] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8518f3c8-738d-468a-9f57-de50e4e67108 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139275] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 719e6e11-ac2f-4b71-9a55-5c110f8aaba4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1033.139405] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 475a403c-bfdb-4239-b0d4-3baca441603f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1033.139501] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 3edfba43-55e9-4180-bb03-ce008af3a7d7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1033.139608] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 187cbba3-3700-4c40-a514-28e08ea13426 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.139788] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1033.148447] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad8d2a1-f5f6-4d2e-9d20-035906c24256 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.591431] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.592239] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Instance network_info: |[{"id": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "address": "fa:16:3e:19:c3:b7", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f896a3-40", "ovs_interfaceid": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.593453] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:c3:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '767a3a48-41d4-4a0c-961d-0024837f63bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0f896a3-40a4-4f1f-8bbc-f97ad11f499a', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.603857] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.603857] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.603857] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c5066cf-9687-472d-950c-ad254886e289 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.634507] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.634507] env[69982]: value = "task-3865142" [ 1033.634507] env[69982]: _type = "Task" [ 1033.634507] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.645211] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9c0d0f4f-9e88-4e67-99d9-d957652587cd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1033.646461] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865142, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.691200] env[69982]: DEBUG nova.compute.manager [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Received event network-changed-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.691200] env[69982]: DEBUG nova.compute.manager [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Refreshing instance network info cache due to event network-changed-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1033.691200] env[69982]: DEBUG oslo_concurrency.lockutils [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] Acquiring lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.691347] env[69982]: DEBUG oslo_concurrency.lockutils [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] Acquired lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.691563] env[69982]: DEBUG nova.network.neutron [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Refreshing network info cache for port a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1033.722113] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "dffd4365-d2b6-4201-be46-a823399bb2a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.722405] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.948225] env[69982]: DEBUG nova.compute.manager [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Received event network-vif-plugged-8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1033.948225] env[69982]: DEBUG oslo_concurrency.lockutils [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] Acquiring lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.948225] env[69982]: DEBUG oslo_concurrency.lockutils [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.948225] env[69982]: DEBUG oslo_concurrency.lockutils [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.948458] env[69982]: DEBUG nova.compute.manager [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] No waiting events found dispatching network-vif-plugged-8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1033.948637] env[69982]: WARNING nova.compute.manager [req-440b0628-8d37-4587-a2ab-aa3164920925 req-c33a7dc9-4f18-49db-ada3-267180f99584 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Received unexpected event network-vif-plugged-8e419c4b-3285-40f4-972c-96a55d41ddd6 for instance with vm_state building and task_state spawning. [ 1034.051629] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Successfully updated port: 8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1034.147984] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 00f8efe0-28ad-4d95-b931-a31de0c03bd7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.149441] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865142, 'name': CreateVM_Task, 'duration_secs': 0.38973} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.149620] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.150472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.150648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.150984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1034.151761] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfb988b5-a7a6-4f11-afab-abcf05d9f395 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.157365] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1034.157365] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dc831f-d94f-9e34-6901-357e68dd0f9f" [ 1034.157365] env[69982]: _type = "Task" [ 1034.157365] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.166624] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dc831f-d94f-9e34-6901-357e68dd0f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.228584] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.458285] env[69982]: DEBUG nova.network.neutron [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Updated VIF entry in instance network info cache for port a0f896a3-40a4-4f1f-8bbc-f97ad11f499a. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1034.458776] env[69982]: DEBUG nova.network.neutron [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Updating instance_info_cache with network_info: [{"id": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "address": "fa:16:3e:19:c3:b7", "network": {"id": "86679f9e-4905-467f-9b96-561a778af240", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1249341304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18dcc70df5e144e3b4f0592b0112aaf7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "767a3a48-41d4-4a0c-961d-0024837f63bd", "external-id": "nsx-vlan-transportzone-141", "segmentation_id": 141, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0f896a3-40", "ovs_interfaceid": "a0f896a3-40a4-4f1f-8bbc-f97ad11f499a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.558803] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.559032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.559174] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1034.652770] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 5bbc7b58-3e8e-495f-911a-072d282e48a9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1034.652961] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration ea7d255f-79eb-4118-bb96-acb700b60ec6 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1034.653090] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d21659fd-015d-4f5b-b4b5-f38f550e0f00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1034.668702] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52dc831f-d94f-9e34-6901-357e68dd0f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.011014} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.669026] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.669288] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1034.669590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.669766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.669953] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1034.670518] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d045bcd-b63f-4937-b94b-4a4d068390db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.680279] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 
tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1034.680540] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1034.681297] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3989352-bf11-4bb0-a8d5-e7be13b84f00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.688106] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1034.688106] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ec4245-fc38-e5db-3b72-6d5042634e1b" [ 1034.688106] env[69982]: _type = "Task" [ 1034.688106] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.696893] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ec4245-fc38-e5db-3b72-6d5042634e1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.756093] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.961903] env[69982]: DEBUG oslo_concurrency.lockutils [req-8bd61812-6273-4d41-ab39-224e09027c08 req-cc95735f-22f4-458e-a452-5e19f675a809 service nova] Releasing lock "refresh_cache-187cbba3-3700-4c40-a514-28e08ea13426" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.093422] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1035.156592] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1035.200046] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ec4245-fc38-e5db-3b72-6d5042634e1b, 'name': SearchDatastore_Task, 'duration_secs': 0.010923} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.202135] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de65cedf-0d23-49cc-b140-9a0533452760 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.208278] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1035.208278] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52db0cc7-7c0f-b4ad-62d9-5f591cc60feb" [ 1035.208278] env[69982]: _type = "Task" [ 1035.208278] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.221577] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52db0cc7-7c0f-b4ad-62d9-5f591cc60feb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.275783] env[69982]: DEBUG nova.network.neutron [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Updating instance_info_cache with network_info: [{"id": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "address": "fa:16:3e:e8:fe:85", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e419c4b-32", "ovs_interfaceid": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.666048] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1035.719618] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52db0cc7-7c0f-b4ad-62d9-5f591cc60feb, 'name': SearchDatastore_Task, 'duration_secs': 0.043757} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.719914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.720192] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 187cbba3-3700-4c40-a514-28e08ea13426/187cbba3-3700-4c40-a514-28e08ea13426.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1035.720455] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-050cf0a8-2e5f-4b3c-8518-b7d8998bab91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.727734] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1035.727734] env[69982]: value = "task-3865143" [ 1035.727734] env[69982]: _type = "Task" [ 1035.727734] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.736782] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865143, 'name': CopyVirtualDisk_Task} progress is 0%. 
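Note: the CopyVirtualDisk_Task above copies the cached base image into the new instance's own datastore folder. A small sketch of the "[datastore] folder/file.vmdk" path convention visible in these lines; the helper names are hypothetical and only reproduce the source and destination strings logged above, not Nova's ds_util API:

    # Hypothetical helpers illustrating the datastore path layout seen in the log.
    def cached_image_path(datastore, image_id):
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

    def instance_root_disk_path(datastore, instance_uuid):
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    src = cached_image_path("datastore2", "a4e69d6f-1c15-4f57-92a8-5e81c6be8172")
    dst = instance_root_disk_path("datastore2", "187cbba3-3700-4c40-a514-28e08ea13426")
    # src and dst match the CopyVirtualDisk source and destination logged above.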
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.781423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.782058] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Instance network_info: |[{"id": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "address": "fa:16:3e:e8:fe:85", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e419c4b-32", "ovs_interfaceid": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1035.782775] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:fe:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e419c4b-3285-40f4-972c-96a55d41ddd6', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.790975] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.791212] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.791436] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-354780a2-0e87-4337-9102-5218fc3c271f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.812501] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.812501] env[69982]: value = "task-3865144" [ 1035.812501] env[69982]: _type = "Task" [ 1035.812501] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.820680] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865144, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.976708] env[69982]: DEBUG nova.compute.manager [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Received event network-changed-8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.976871] env[69982]: DEBUG nova.compute.manager [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Refreshing instance network info cache due to event network-changed-8e419c4b-3285-40f4-972c-96a55d41ddd6. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1035.977134] env[69982]: DEBUG oslo_concurrency.lockutils [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] Acquiring lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.977290] env[69982]: DEBUG oslo_concurrency.lockutils [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] Acquired lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.978098] env[69982]: DEBUG nova.network.neutron [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Refreshing network info cache for port 8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1036.170189] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
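Note: the network-changed event above is serialized against other cache updates with a per-instance "refresh_cache-<uuid>" lock, the same lock name that appears in the Acquiring/Acquired/Releasing lines throughout this log. A minimal sketch of that pattern, assuming oslo_concurrency's lockutils.lock context manager and a hypothetical refresh callable standing in for the Neutron cache rebuild:

    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, refresh_cache):
        # refresh_cache is a hypothetical callable; only the locking pattern
        # mirrors the "refresh_cache-<uuid>" lock lines in the log above.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            refresh_cache(instance_uuid, port_id)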
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1036.170189] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 0520604d-a406-402f-9686-aee9f2ea548e is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1036.170189] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance fc46eca6-6a60-477c-98de-a2e1f6c7e88b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.170825] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1036.170825] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1036.243542] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865143, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.326006] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865144, 'name': CreateVM_Task, 'duration_secs': 0.385479} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.329125] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.330153] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.330276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.330598] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.330885] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad5630d-738f-4983-9ff6-a43cc14a3ace {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.337387] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1036.337387] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52456272-76ef-7fde-df3d-4a3812396f43" [ 1036.337387] env[69982]: _type = "Task" [ 1036.337387] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.349549] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52456272-76ef-7fde-df3d-4a3812396f43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.584096] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5657e31-4855-4e7d-aa19-33c6638560d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.592361] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4ec5a9-dd65-4bc2-91b8-3a1f824ef312 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.626968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53fe5bd-3586-4b70-8ebc-2116889b1579 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.635795] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c23535-481e-4936-b423-ac5646332c0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.651765] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.741016] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.75473} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.741565] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 187cbba3-3700-4c40-a514-28e08ea13426/187cbba3-3700-4c40-a514-28e08ea13426.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1036.741565] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1036.741870] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d115e17c-26b0-4b6b-9d08-18e57c0af514 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.749824] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1036.749824] env[69982]: value = "task-3865145" [ 1036.749824] env[69982]: _type = "Task" [ 1036.749824] env[69982]: } to complete. 
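Note: each "Waiting for the task ... to complete" block followed by "... progress is N%." lines reflects oslo.vmware's wait_for_task/_poll_task loop (api.py:397 and api.py:434 in the source tags above). A self-contained sketch of that polling shape; get_task_info is a hypothetical stand-in for the vCenter TaskInfo lookup, not the oslo.vmware API:

    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info() -> (state, progress, result_or_error); hypothetical.
        while True:
            state, progress, result = get_task_info()
            if state == "success":
                return result                     # logged as "completed successfully"
            if state == "error":
                raise RuntimeError(result)
            print("progress is %s%%" % progress)  # logged as "... progress is N%."
            time.sleep(poll_interval)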
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.759298] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865145, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.782387] env[69982]: DEBUG nova.network.neutron [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Updated VIF entry in instance network info cache for port 8e419c4b-3285-40f4-972c-96a55d41ddd6. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1036.782771] env[69982]: DEBUG nova.network.neutron [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Updating instance_info_cache with network_info: [{"id": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "address": "fa:16:3e:e8:fe:85", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e419c4b-32", "ovs_interfaceid": "8e419c4b-3285-40f4-972c-96a55d41ddd6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.849038] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52456272-76ef-7fde-df3d-4a3812396f43, 'name': SearchDatastore_Task, 'duration_secs': 0.026449} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.849357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.849592] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.849870] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.850025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.850220] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.850504] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b1f9bd2-1e76-4e23-a5af-bd2dbe0ca4c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.859648] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.859837] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.860595] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-689b12e2-ebbe-4b6e-a43c-bad9e27d28ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.867689] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1036.867689] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228e3b8-8c65-e918-1831-e1db8322c006" [ 1036.867689] env[69982]: _type = "Task" [ 1036.867689] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.876507] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228e3b8-8c65-e918-1831-e1db8322c006, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.158612] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1037.260390] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067152} completed successfully. 
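Note: the inventory payload reported above is what Placement uses to size provider 206a5498-2e79-46c1-a636-9488a05fb67d; the effective capacity of each resource class is (total - reserved) * allocation_ratio. A worked check against the values in that report:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0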
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.260759] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.261493] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b4012c-8406-4b92-b770-512bad2226a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.284533] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 187cbba3-3700-4c40-a514-28e08ea13426/187cbba3-3700-4c40-a514-28e08ea13426.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.284932] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce786795-bbec-4909-824d-a13f34688a6e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.299961] env[69982]: DEBUG oslo_concurrency.lockutils [req-f288055b-4879-43ce-9dfb-3d04004849d2 req-bd1a1f52-ecae-46d8-8de6-ba4f0853a328 service nova] Releasing lock "refresh_cache-a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.307432] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1037.307432] env[69982]: value = "task-3865146" [ 1037.307432] env[69982]: _type = "Task" [ 1037.307432] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.316067] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865146, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.380255] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228e3b8-8c65-e918-1831-e1db8322c006, 'name': SearchDatastore_Task, 'duration_secs': 0.010216} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.380914] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d00a117b-9d59-4a7f-bb35-bae2405c4cc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.386424] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1037.386424] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f6b4d3-fc4f-dd4f-f69d-8a2fa13ef320" [ 1037.386424] env[69982]: _type = "Task" [ 1037.386424] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.394727] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f6b4d3-fc4f-dd4f-f69d-8a2fa13ef320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.665071] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1037.665071] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.585s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.665071] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.206s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.665351] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.667517] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.931s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.668974] env[69982]: INFO nova.compute.claims [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1037.688322] env[69982]: INFO nova.scheduler.client.report [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted allocations for instance 475a403c-bfdb-4239-b0d4-3baca441603f [ 1037.818647] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865146, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.897716] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f6b4d3-fc4f-dd4f-f69d-8a2fa13ef320, 'name': SearchDatastore_Task, 'duration_secs': 0.010883} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.898045] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.898332] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae/a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.898623] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bae5346-b440-42dd-bbb1-326145db0dbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.906369] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1037.906369] env[69982]: value = "task-3865147" [ 1037.906369] env[69982]: _type = "Task" [ 1037.906369] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.915777] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865147, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.196500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dfec61d6-dfe8-4905-9232-596c37bdc186 tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "475a403c-bfdb-4239-b0d4-3baca441603f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.007s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.323031] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865146, 'name': ReconfigVM_Task, 'duration_secs': 0.737486} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.323031] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 187cbba3-3700-4c40-a514-28e08ea13426/187cbba3-3700-4c40-a514-28e08ea13426.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.323531] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04fb8a30-7602-4f7a-8eeb-3db27ebd3aa9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.331855] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1038.331855] env[69982]: value = "task-3865148" [ 1038.331855] env[69982]: _type = "Task" [ 1038.331855] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.343577] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865148, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.417617] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865147, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.774133] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.774404] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.845317] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865148, 'name': Rename_Task, 'duration_secs': 0.179804} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.848416] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1038.848937] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2615d684-3413-4008-a406-57c5abb5a57a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.857174] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1038.857174] env[69982]: value = "task-3865149" [ 1038.857174] env[69982]: _type = "Task" [ 1038.857174] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.868370] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.919914] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865147, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525622} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.920117] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae/a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1038.920391] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1038.920656] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a7eec6d-cc7d-4558-94cf-916b9dde32ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.930655] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1038.930655] env[69982]: value = "task-3865150" [ 1038.930655] env[69982]: _type = "Task" [ 1038.930655] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.941040] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865150, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.082088] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0448e5be-1b0d-44b1-8600-3c70a934b9b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.091437] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b08eaf-d727-4b47-9791-72feeed22551 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.122744] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7587626f-5e86-4ea4-9060-1b44bcf414e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.130985] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6412f5-8bbc-4dc3-9341-81e5d54f0ba2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.145194] env[69982]: DEBUG nova.compute.provider_tree [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.277497] env[69982]: INFO nova.compute.manager [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Detaching volume a121791e-c716-401f-b84f-37fb82a9d242 [ 1039.317776] env[69982]: INFO nova.virt.block_device [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Attempting to driver detach volume a121791e-c716-401f-b84f-37fb82a9d242 from mountpoint /dev/sdb [ 1039.317776] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1039.317776] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768021', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'name': 'volume-a121791e-c716-401f-b84f-37fb82a9d242', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '331f218a-ad6b-4417-b56d-83113e0c92cb', 'attached_at': '', 'detached_at': '', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'serial': 'a121791e-c716-401f-b84f-37fb82a9d242'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1039.319204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b803556c-353e-4e6d-8d66-e9b8cd767ee6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.344767] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4790fa70-fe2f-4f32-a410-47219cb77270 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.352863] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ce48d6-1d67-4dfa-b996-64b21b57f9a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.377041] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0558116f-eaaf-4c4c-81ac-4d9e4275cd79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.385805] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865149, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.397033] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] The volume has not been displaced from its original location: [datastore2] volume-a121791e-c716-401f-b84f-37fb82a9d242/volume-a121791e-c716-401f-b84f-37fb82a9d242.vmdk. No consolidation needed. 
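Note: the _detach_volume_vmdk call above receives the connection_info shown in that log entry; the fields the driver works with here are the vCenter managed-object reference under 'volume' and the Cinder volume UUID. A minimal sketch of pulling those out, with the dict literal copied from the log and trimmed to the relevant keys; the backing path string matches the "volume has not been displaced" line above:

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-768021",
            "volume_id": "a121791e-c716-401f-b84f-37fb82a9d242",
            "name": "volume-a121791e-c716-401f-b84f-37fb82a9d242",
            "access_mode": "rw",
        },
    }

    data = connection_info["data"]
    # Backing VMDK path as logged: [datastore2] volume-<id>/volume-<id>.vmdk
    backing = "[datastore2] %(name)s/%(name)s.vmdk" % {"name": data["name"]}
    print(data["volume"], data["volume_id"], backing)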
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1039.402335] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1039.402702] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5da7261e-20f5-47b0-b12c-062570524f6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.422026] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1039.422026] env[69982]: value = "task-3865151" [ 1039.422026] env[69982]: _type = "Task" [ 1039.422026] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.430850] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.439674] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074333} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.440090] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.440889] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97934fad-2576-4302-8e80-0d5901fcbef2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.472583] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae/a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.472968] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63abb08a-0313-434c-9006-900252868879 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.493231] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1039.493231] env[69982]: value = "task-3865152" [ 1039.493231] env[69982]: _type = "Task" [ 1039.493231] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.503372] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.648763] env[69982]: DEBUG nova.scheduler.client.report [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.978256] env[69982]: DEBUG oslo_vmware.api [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865149, 'name': PowerOnVM_Task, 'duration_secs': 0.930022} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.978256] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1039.978256] env[69982]: INFO nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1039.978256] env[69982]: DEBUG nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.978256] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e0e49a-cf0a-4a6c-b535-8ff5075f9b50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.979594] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865151, 'name': ReconfigVM_Task, 'duration_secs': 0.262754} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.979594] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1039.979594] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369a75cd-2cd3-47cf-bc03-ce81db7a00d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.979594] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1039.979594] env[69982]: value = "task-3865153" [ 1039.979594] env[69982]: _type = "Task" [ 1039.979594] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.979594] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865153, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.005491] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865152, 'name': ReconfigVM_Task, 'duration_secs': 0.445456} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.005662] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Reconfigured VM instance instance-00000056 to attach disk [datastore2] a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae/a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.006429] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdba53e6-75fa-4cb7-a57d-8a7f69c3c897 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.014128] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1040.014128] env[69982]: value = "task-3865154" [ 1040.014128] env[69982]: _type = "Task" [ 1040.014128] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.025781] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865154, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.153962] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.154576] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1040.157186] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.422s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.159021] env[69982]: INFO nova.compute.claims [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1040.410801] env[69982]: INFO nova.compute.manager [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Took 36.34 seconds to build instance. [ 1040.477098] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865153, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.524535] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865154, 'name': Rename_Task, 'duration_secs': 0.408525} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.524835] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.525115] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0ac8b63-e241-44b9-8ed6-9b1a99fba7a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.532738] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1040.532738] env[69982]: value = "task-3865155" [ 1040.532738] env[69982]: _type = "Task" [ 1040.532738] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.541186] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865155, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.663940] env[69982]: DEBUG nova.compute.utils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1040.668375] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1040.668690] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1040.729190] env[69982]: DEBUG nova.policy [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1040.912760] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8ea35fa3-c401-4511-b438-ca68769d36d9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.852s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.989576] env[69982]: DEBUG oslo_vmware.api [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865153, 'name': ReconfigVM_Task, 'duration_secs': 0.801611} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.989576] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768021', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'name': 'volume-a121791e-c716-401f-b84f-37fb82a9d242', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '331f218a-ad6b-4417-b56d-83113e0c92cb', 'attached_at': '', 'detached_at': '', 'volume_id': 'a121791e-c716-401f-b84f-37fb82a9d242', 'serial': 'a121791e-c716-401f-b84f-37fb82a9d242'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1041.046365] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865155, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.082871] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Successfully created port: ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1041.169704] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1041.192655] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "187cbba3-3700-4c40-a514-28e08ea13426" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.193264] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.193389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "187cbba3-3700-4c40-a514-28e08ea13426-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.196033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.196033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.196410] env[69982]: INFO nova.compute.manager [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Terminating instance [ 1041.538886] env[69982]: DEBUG nova.objects.instance [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'flavor' on Instance uuid 331f218a-ad6b-4417-b56d-83113e0c92cb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.549480] env[69982]: DEBUG oslo_vmware.api [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865155, 'name': PowerOnVM_Task, 'duration_secs': 0.965152} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.549800] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.550097] env[69982]: INFO nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1041.550300] env[69982]: DEBUG nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.551145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fef569-9bd3-4c38-8526-655338891d46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.595517] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03dbbcd1-9458-4b81-94c8-fab0abbcafa9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.606286] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc27b988-3c74-4004-b057-d01e12c9e8c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.645435] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5afe880-e10b-41f2-a30a-e6e424180439 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.653629] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fff48db-fffb-4b86-b240-13858b950b1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.669493] env[69982]: DEBUG nova.compute.provider_tree [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.708204] env[69982]: DEBUG nova.compute.manager [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1041.708486] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1041.709473] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4d477d-b8a2-4037-89e6-cbf20febaa43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.718740] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1041.719015] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a787f4a4-759b-482a-91a2-271e835f19bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.726295] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1041.726295] env[69982]: value = "task-3865156" [ 1041.726295] env[69982]: _type = "Task" [ 1041.726295] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.736428] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.070172] env[69982]: INFO nova.compute.manager [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Took 30.00 seconds to build instance. [ 1042.173046] env[69982]: DEBUG nova.scheduler.client.report [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.183308] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1042.213214] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1042.213490] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1042.213648] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1042.213824] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1042.213972] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1042.214181] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1042.214408] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1042.214569] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1042.214738] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1042.214903] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1042.215090] env[69982]: DEBUG nova.virt.hardware [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1042.216015] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4292c537-1e29-4e91-a401-14b12f0dfb2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.225031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5a5f4a-d232-44c0-a183-b29bf9247e7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.236989] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865156, 'name': PowerOffVM_Task, 'duration_secs': 0.308317} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.246812] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.246812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1042.246812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eceb06d-01f3-4ff4-a138-1e0b0feab58a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.311597] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1042.311949] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Deleting contents of the VM from datastore datastore2 {{(pid=69982) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1042.312186] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleting the datastore file [datastore2] 187cbba3-3700-4c40-a514-28e08ea13426 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1042.312471] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9521476-2295-4981-8f07-7e81d36fb637 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.319638] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for the task: (returnval){ [ 1042.319638] env[69982]: value = "task-3865158" [ 1042.319638] env[69982]: _type = "Task" [ 1042.319638] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.328677] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.548992] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a43f283c-22d8-4ea1-a6b8-8b8a99cb96ba tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.774s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.572277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5479a61-00d4-46f2-b6fb-4fcbe825e086 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.514s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.619554] env[69982]: DEBUG nova.compute.manager [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Received event network-vif-plugged-ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.619782] env[69982]: DEBUG oslo_concurrency.lockutils [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] Acquiring lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.619990] env[69982]: DEBUG oslo_concurrency.lockutils [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.620173] env[69982]: DEBUG oslo_concurrency.lockutils [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.620341] env[69982]: DEBUG nova.compute.manager [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] No waiting events found dispatching network-vif-plugged-ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.620512] env[69982]: WARNING nova.compute.manager [req-80bae1a9-3027-4336-ac6c-f135b5d77a0b req-00b48a97-f5e4-487d-8fe8-5be6dca55ebc service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Received unexpected event network-vif-plugged-ee39c037-aaca-4a17-aa77-f50203a74e94 for instance with vm_state building and task_state spawning. [ 1042.679350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.679977] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1042.682734] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.264s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.683662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.685174] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.231s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.688393] env[69982]: INFO nova.compute.claims [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1042.722351] env[69982]: INFO nova.scheduler.client.report [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Deleted allocations for instance 719e6e11-ac2f-4b71-9a55-5c110f8aaba4 [ 1042.773397] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Successfully updated port: ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.835245] env[69982]: DEBUG oslo_vmware.api [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Task: {'id': task-3865158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147837} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.835613] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1042.835845] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1042.836074] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1042.837023] env[69982]: INFO nova.compute.manager [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1042.837361] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1042.837900] env[69982]: DEBUG nova.compute.manager [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1042.838048] env[69982]: DEBUG nova.network.neutron [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1043.195499] env[69982]: DEBUG nova.compute.utils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1043.199880] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1043.200087] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1043.233211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-143a9ab7-d314-4ece-ba09-97e55a646b9e tempest-ServersListShow298Test-2141021973 tempest-ServersListShow298Test-2141021973-project-member] Lock "719e6e11-ac2f-4b71-9a55-5c110f8aaba4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.152s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.276907] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.277083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.277241] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1043.291395] env[69982]: DEBUG nova.policy [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f381157804754723b80e45911120b327', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '17451571a4124ee0b2d674b9bd38c66e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1043.334782] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39555a17-3099-4722-b42b-01c49f77f160 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.343660] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Suspending the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1043.343976] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-4cbbdc94-0395-4256-9362-6eae163ed538 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.351753] env[69982]: DEBUG oslo_vmware.api [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1043.351753] env[69982]: value = "task-3865159" [ 1043.351753] env[69982]: _type = "Task" [ 1043.351753] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.362332] env[69982]: DEBUG oslo_vmware.api [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865159, 'name': SuspendVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.393251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.393537] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.393748] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1043.393979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.394320] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.396912] env[69982]: INFO nova.compute.manager [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Terminating instance [ 
1043.710812] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1043.736610] env[69982]: DEBUG nova.network.neutron [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.777472] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Successfully created port: bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.830610] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1043.865306] env[69982]: DEBUG oslo_vmware.api [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865159, 'name': SuspendVM_Task} progress is 62%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.901389] env[69982]: DEBUG nova.compute.manager [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1043.901643] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.905565] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a5fbdd-10fe-4c7b-a4f2-a1ed14b8b62f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.916225] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.918966] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63fff9b5-4340-42e0-bb2e-0cb78084218f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.926908] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1043.926908] env[69982]: value = "task-3865160" [ 1043.926908] env[69982]: _type = "Task" [ 1043.926908] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.938305] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865160, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.154375] env[69982]: DEBUG nova.network.neutron [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Updating instance_info_cache with network_info: [{"id": "ee39c037-aaca-4a17-aa77-f50203a74e94", "address": "fa:16:3e:06:02:30", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee39c037-aa", "ovs_interfaceid": "ee39c037-aaca-4a17-aa77-f50203a74e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.224363] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dd07f5-37eb-4f15-a91d-b7e1f697c150 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.233636] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9cd8e17-8150-48fe-a1b0-395f8a218a60 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.239796] env[69982]: INFO nova.compute.manager [-] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Took 1.40 seconds to deallocate network for instance. 
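The network_info cache entry logged above is a list of VIF dicts, each carrying the port id, MAC address, and a nested network.subnets[].ips[] structure. A small helper, illustrative only and not part of Nova, that flattens an entry of exactly that shape into the fields most useful when reading these logs:

    # Illustrative helper for the network_info structure logged above; the
    # key names mirror that cache entry (id, address, network.subnets[].ips[]).
    def summarize_network_info(network_info):
        rows = []
        for vif in network_info:
            ips = [ip['address']
                   for subnet in vif['network']['subnets']
                   for ip in subnet['ips']]
            rows.append({
                'port_id': vif['id'],      # e.g. ee39c037-aaca-...
                'mac': vif['address'],     # e.g. fa:16:3e:06:02:30
                'ips': ips,                # e.g. ['192.168.128.6']
                'vif_type': vif['type'],   # e.g. 'ovs'
                'active': vif['active'],
            })
        return rows
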
[ 1044.271421] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528facfe-bd15-44d0-88b7-1f963c0781fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.282290] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05f5435-66dd-4938-a6f9-1df74c9bbd94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.298046] env[69982]: DEBUG nova.compute.provider_tree [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.365029] env[69982]: DEBUG oslo_vmware.api [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865159, 'name': SuspendVM_Task, 'duration_secs': 0.664704} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.365029] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Suspended the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1044.365322] env[69982]: DEBUG nova.compute.manager [None req-2f207f5a-39e6-41cf-a221-bef7a4725de9 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1044.366528] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf36725-7854-4d19-ab20-a92a789a4488 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.438212] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865160, 'name': PowerOffVM_Task, 'duration_secs': 0.229504} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.438508] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.438683] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.439008] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dcba4052-c4c0-49fe-b5e0-24a19afb7f06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.529533] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1044.529784] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1044.529972] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleting the datastore file [datastore1] 331f218a-ad6b-4417-b56d-83113e0c92cb {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1044.530267] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0b180d0-098a-48d6-88b8-e5e547034b04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.537151] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1044.537151] env[69982]: value = "task-3865162" [ 1044.537151] env[69982]: _type = "Task" [ 1044.537151] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.546917] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865162, 'name': DeleteDatastoreFile_Task} progress is 0%. 
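Above, the destroy path powers the VM off, unregisters it from the vCenter inventory (a plain method call, no task), and then deletes the instance directory through FileManager.DeleteDatastoreFile_Task. A sketch of those two vSphere calls through oslo.vmware; `session`, `vm_ref`, `dc_ref`, and the datastore path are assumed to come from earlier lookups rather than being real Nova signatures.

    # Sketch of the unregister-and-delete sequence shown above (UnregisterVM,
    # then task-3865162). All morefs and the datastore path are assumed inputs.
    def unregister_and_delete(session, vm_ref, dc_ref, ds_path):
        # UnregisterVM removes the VM from vCenter's inventory without
        # touching its files; it is synchronous, so there is no task to wait on.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # Deleting the instance directory (e.g. "[datastore1] <instance-uuid>")
        # does go through a task on the FileManager.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path, datacenter=dc_ref)
        session.wait_for_task(task)
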
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.659141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.659489] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Instance network_info: |[{"id": "ee39c037-aaca-4a17-aa77-f50203a74e94", "address": "fa:16:3e:06:02:30", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee39c037-aa", "ovs_interfaceid": "ee39c037-aaca-4a17-aa77-f50203a74e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.660023] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:02:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee39c037-aaca-4a17-aa77-f50203a74e94', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.668356] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
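The "Instance VIF info" entry above is derived from the network_info element logged just before it: the NSX logical-switch id from the port's details becomes an OpaqueNetwork reference and the Neutron port id becomes iface_id. A rough mapping for this NSX opaque-network case only; Nova's real translation lives in nova/virt/vmwareapi/vif.py and covers more VIF types.

    # Rough mapping from one network_info entry to the "Instance VIF info"
    # dict logged above; handles only the NSX OpaqueNetwork case.
    def vif_info_from_network_info(vif, vif_model='vmxnet3'):
        details = vif['details']
        return {
            'network_name': vif['network']['bridge'],   # 'br-int'
            'mac_address': vif['address'],              # 'fa:16:3e:06:02:30'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                      # Neutron port UUID
            'vif_model': vif_model,
        }
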
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.668594] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.668819] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ad95e98-c84a-42e1-9eab-06b0d4f3cdc5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.691915] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.691915] env[69982]: value = "task-3865163" [ 1044.691915] env[69982]: _type = "Task" [ 1044.691915] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.700250] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865163, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.730037] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1044.735234] env[69982]: DEBUG nova.compute.manager [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Received event network-changed-ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.735440] env[69982]: DEBUG nova.compute.manager [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Refreshing instance network info cache due to event network-changed-ee39c037-aaca-4a17-aa77-f50203a74e94. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1044.736052] env[69982]: DEBUG oslo_concurrency.lockutils [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] Acquiring lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.736244] env[69982]: DEBUG oslo_concurrency.lockutils [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] Acquired lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.736481] env[69982]: DEBUG nova.network.neutron [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Refreshing network info cache for port ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1044.759360] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1044.759798] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1044.760044] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1044.760292] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1044.760446] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1044.760593] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Chose 
sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1044.760808] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1044.760971] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1044.761173] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1044.761341] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1044.761515] env[69982]: DEBUG nova.virt.hardware [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1044.762862] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacaeb89-300f-48e4-ba53-dda1f723aa5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.772560] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f469657-9166-499f-acde-01b2fefd10ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.777560] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.802179] env[69982]: DEBUG nova.scheduler.client.report [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.048731] env[69982]: DEBUG oslo_vmware.api [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197809} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.049185] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1045.049491] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1045.049804] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1045.050152] env[69982]: INFO nova.compute.manager [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1045.050673] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1045.051155] env[69982]: DEBUG nova.compute.manager [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1045.051368] env[69982]: DEBUG nova.network.neutron [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1045.203123] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865163, 'name': CreateVM_Task, 'duration_secs': 0.371721} completed successfully. 
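The hardware.py lines a little earlier walk from flavor and image limits of 0:0:0 (unconstrained, so the 65536 maximums apply) to exactly one candidate topology for a single vCPU. A toy enumeration, deliberately not Nova's implementation, of why 1 vCPU collapses to sockets=1, cores=1, threads=1:

    # Toy version of the topology filtering logged above: enumerate every
    # (sockets, cores, threads) combination whose product equals the vCPU
    # count and which stays under the (here effectively unlimited) maximums.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies


    print(possible_topologies(1))  # [(1, 1, 1)] -> the "Got 1 possible topologies" line
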
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.203387] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.204127] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.204379] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.204662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.204948] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7c82b6-5672-4b25-ac6b-12318b69ab49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.211063] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1045.211063] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528eb31a-f34f-754a-f21d-5cd724067448" [ 1045.211063] env[69982]: _type = "Task" [ 1045.211063] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.220393] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528eb31a-f34f-754a-f21d-5cd724067448, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.307323] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.307874] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Start building networks asynchronously for instance. 
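The inventory reported for provider 206a5498-... above (VCPU total=48 at allocation_ratio 4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=400 with max_unit=45) is what the instance_claim above is checked against. Placement derives effective capacity as (total - reserved) * allocation_ratio, with max_unit bounding any single allocation; a quick worked check of those figures, using only the numbers from the log:

    # Worked example using the inventory figures logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 45},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:g}, single allocation <= {inv['max_unit']}")
    # VCPU: capacity=192, single allocation <= 16
    # MEMORY_MB: capacity=196078, single allocation <= 65530
    # DISK_GB: capacity=400, single allocation <= 45
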
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1045.311213] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 22.540s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.726528] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528eb31a-f34f-754a-f21d-5cd724067448, 'name': SearchDatastore_Task, 'duration_secs': 0.031097} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.729062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.729062] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.729062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.729062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.729062] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.729541] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-60f4089b-ad60-4d83-a826-05847fd1ee02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.740432] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.740693] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.741908] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5da843eb-ea70-4e58-8d1c-2cdfe756b231 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.748608] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1045.748608] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a8556-46cf-0ef8-abd5-cc12a1afb6dd" [ 1045.748608] env[69982]: _type = "Task" [ 1045.748608] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.758510] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a8556-46cf-0ef8-abd5-cc12a1afb6dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.818715] env[69982]: DEBUG nova.compute.utils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1045.822441] env[69982]: INFO nova.compute.claims [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.827528] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1045.827985] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1045.884903] env[69982]: DEBUG nova.network.neutron [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Updated VIF entry in instance network info cache for port ee39c037-aaca-4a17-aa77-f50203a74e94. 
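The MakeDirectory call and "Folder [datastore2] devstack-image-cache_base created" line above create the image cache folder before the cached VMDK is searched for. A sketch of that single step through oslo.vmware; `session` and `dc_ref` are assumed, and the path simply mirrors the one in the log. The surrounding _create_folder_if_missing logic appears to treat an already-existing folder as success, which is what makes the call safe to repeat.

    # Sketch of the MakeDirectory call logged above. `session` and `dc_ref`
    # are assumed inputs; the datastore path is the one from the log.
    def create_image_cache_dir(session, dc_ref,
                               path='[datastore2] devstack-image-cache_base'):
        file_manager = session.vim.service_content.fileManager
        # MakeDirectory is synchronous (no task); createParentDirectories
        # makes it tolerant of missing parents, similar to `mkdir -p`.
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name=path, datacenter=dc_ref,
                           createParentDirectories=True)
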
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1045.885334] env[69982]: DEBUG nova.network.neutron [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Updating instance_info_cache with network_info: [{"id": "ee39c037-aaca-4a17-aa77-f50203a74e94", "address": "fa:16:3e:06:02:30", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee39c037-aa", "ovs_interfaceid": "ee39c037-aaca-4a17-aa77-f50203a74e94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.909672] env[69982]: DEBUG nova.policy [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '361bff09a25e4b5ab6a071a458858131', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afaf89b0250d46048813da25c754e1a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1046.008791] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Successfully updated port: bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.056519] env[69982]: DEBUG nova.compute.manager [req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Received event network-vif-plugged-bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.056868] env[69982]: DEBUG oslo_concurrency.lockutils [req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] Acquiring lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.057054] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.057186] env[69982]: DEBUG oslo_concurrency.lockutils [req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.057434] env[69982]: DEBUG nova.compute.manager [req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] No waiting events found dispatching network-vif-plugged-bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1046.057558] env[69982]: WARNING nova.compute.manager [req-68f3900d-a3cb-4edb-8aff-ba9b08991bfd req-aa02d1d3-6d10-43ac-87dd-0144b647e496 service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Received unexpected event network-vif-plugged-bcebda23-660e-4d10-a486-029c8cef42dc for instance with vm_state building and task_state spawning. [ 1046.152111] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.152405] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.152621] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.152840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.153110] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock 
"a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.155939] env[69982]: INFO nova.compute.manager [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Terminating instance [ 1046.157821] env[69982]: DEBUG nova.network.neutron [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.263859] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a8556-46cf-0ef8-abd5-cc12a1afb6dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010772} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.268066] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d7b5aa6-f63f-4edd-92e5-d79758d2b75a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.272597] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1046.272597] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a6a71-95e3-bf15-2998-94db10329c4c" [ 1046.272597] env[69982]: _type = "Task" [ 1046.272597] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.286560] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a6a71-95e3-bf15-2998-94db10329c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.01149} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.287090] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.287494] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd/9c0d0f4f-9e88-4e67-99d9-d957652587cd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.288297] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-925a6c1e-e368-478a-bdec-29c62d6fc5cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.298203] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1046.298203] env[69982]: value = "task-3865164" [ 1046.298203] env[69982]: _type = "Task" [ 1046.298203] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.309644] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865164, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.328954] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Start building block device mappings for instance. 
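The CopyVirtualDisk_Task above (task-3865164) clones the cached image VMDK from devstack-image-cache_base into the new instance's directory before the root disk is extended. A sketch of that call through oslo.vmware; `session` and `dc_ref` are assumed, and the default paths simply repeat the ones in the log.

    # Sketch of the disk copy above: cached image VMDK -> instance directory.
    def copy_cached_image(session, dc_ref,
                          src='[datastore2] devstack-image-cache_base/'
                              'a4e69d6f-1c15-4f57-92a8-5e81c6be8172/'
                              'a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk',
                          dst='[datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd/'
                              '9c0d0f4f-9e88-4e67-99d9-d957652587cd.vmdk'):
        disk_manager = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_manager,
                                  sourceName=src, sourceDatacenter=dc_ref,
                                  destName=dst, destDatacenter=dc_ref)
        # Poll until vCenter reports completion (progress 0% -> 77% -> done).
        session.wait_for_task(task)
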
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1046.336038] env[69982]: INFO nova.compute.resource_tracker [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating resource usage from migration ea7d255f-79eb-4118-bb96-acb700b60ec6 [ 1046.351106] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Successfully created port: 53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1046.391883] env[69982]: DEBUG oslo_concurrency.lockutils [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] Releasing lock "refresh_cache-9c0d0f4f-9e88-4e67-99d9-d957652587cd" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.391883] env[69982]: DEBUG nova.compute.manager [req-01d760ee-e000-4ef9-9725-248a157f34a2 req-7d8d0213-4ced-478c-97a7-dd5d8958f644 service nova] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Received event network-vif-deleted-a0f896a3-40a4-4f1f-8bbc-f97ad11f499a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.511689] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.511949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquired lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.512139] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1046.660706] env[69982]: INFO nova.compute.manager [-] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Took 1.61 seconds to deallocate network for instance. [ 1046.662575] env[69982]: DEBUG nova.compute.manager [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1046.662830] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1046.668077] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aca3f7e-5cd1-423e-b4d8-900bbb7f9814 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.681340] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1046.681668] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b08ffd61-eb35-48bc-8ae3-96731c8f2073 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.762322] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1046.762683] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1046.763023] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore2] a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1046.765129] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d696304-360c-4e7e-bd9c-96b510b0a38f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.773662] env[69982]: DEBUG nova.compute.manager [req-64fe1880-255d-4a2c-a8dc-925068134e0a req-2e37808c-3d8e-4b3d-b67c-552c8084cf8c service nova] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Received event network-vif-deleted-4e8435e5-b58d-4662-bc23-35592b41251f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.782158] env[69982]: DEBUG oslo_vmware.api [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1046.782158] env[69982]: value = "task-3865166" [ 1046.782158] env[69982]: _type = "Task" [ 1046.782158] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.796880] env[69982]: DEBUG oslo_vmware.api [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865166, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.810668] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865164, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.817854] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865eae46-6ace-4c65-9e04-831adc1b7b6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.828262] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7d6281-b22b-4ce3-8495-2bb57d0deaa9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.869209] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e47c9d5-9911-4c4d-b808-b4b0347d00f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.879791] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b89d75-2412-4f7f-8fd0-4bfe03f7461f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.895725] env[69982]: DEBUG nova.compute.provider_tree [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.062844] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1047.175705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1047.231867] env[69982]: DEBUG nova.network.neutron [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Updating instance_info_cache with network_info: [{"id": "bcebda23-660e-4d10-a486-029c8cef42dc", "address": "fa:16:3e:1d:b3:d5", "network": {"id": "408eb348-084c-45e3-9ea6-26aa0dddabf7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-836353718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17451571a4124ee0b2d674b9bd38c66e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcebda23-66", "ovs_interfaceid": "bcebda23-660e-4d10-a486-029c8cef42dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.293299] env[69982]: DEBUG oslo_vmware.api [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220939} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.294573] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.294573] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1047.294573] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1047.294573] env[69982]: INFO nova.compute.manager [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1047.294573] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.294812] env[69982]: DEBUG nova.compute.manager [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1047.294812] env[69982]: DEBUG nova.network.neutron [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1047.311891] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529214} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.312170] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd/9c0d0f4f-9e88-4e67-99d9-d957652587cd.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.312377] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.312634] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdeb6178-33d9-4267-aec9-2b5229623bc5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.320829] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1047.320829] env[69982]: value = "task-3865167" [ 1047.320829] env[69982]: _type = "Task" [ 1047.320829] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.331162] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865167, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.370581] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1047.399415] env[69982]: DEBUG nova.scheduler.client.report [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.411953] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.412276] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.412400] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.412589] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.412743] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.412909] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1047.414065] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.414273] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.414462] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.414632] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.414807] env[69982]: DEBUG nova.virt.hardware [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.415722] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4aee3c-f142-4402-997f-c717ba9b83b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.427842] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ce63ed-1edb-4d0f-9fc7-bb39b3823367 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.735190] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Releasing lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.735652] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Instance network_info: |[{"id": "bcebda23-660e-4d10-a486-029c8cef42dc", "address": "fa:16:3e:1d:b3:d5", "network": {"id": "408eb348-084c-45e3-9ea6-26aa0dddabf7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-836353718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"17451571a4124ee0b2d674b9bd38c66e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcebda23-66", "ovs_interfaceid": "bcebda23-660e-4d10-a486-029c8cef42dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1047.735959] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:b3:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ded18042-834c-4792-b3e8-b1c377446432', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcebda23-660e-4d10-a486-029c8cef42dc', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.744427] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Creating folder: Project (17451571a4124ee0b2d674b9bd38c66e). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1047.744702] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1438ec1-c269-45de-a576-d3378f0fffab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.755615] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Created folder: Project (17451571a4124ee0b2d674b9bd38c66e) in parent group-v767796. [ 1047.755857] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Creating folder: Instances. Parent ref: group-v768044. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1047.756221] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e4af305d-75dd-45e6-b19f-8cab3dccd849 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.765528] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Created folder: Instances in parent group-v768044. [ 1047.765855] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.765977] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.766250] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-627403e3-0f67-4f47-a0b3-c2d7d0d26eff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.785680] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.785680] env[69982]: value = "task-3865170" [ 1047.785680] env[69982]: _type = "Task" [ 1047.785680] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.793627] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865170, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.831994] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865167, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083244} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.832315] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.833240] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ef73a2-c414-4a3e-8276-33937f08e6de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.864858] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd/9c0d0f4f-9e88-4e67-99d9-d957652587cd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.865252] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-93f19e6e-e241-4f61-bc8d-fd1b577a14a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.885873] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1047.885873] env[69982]: value = "task-3865171" [ 1047.885873] env[69982]: _type = "Task" [ 1047.885873] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.894985] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865171, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.907212] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.596s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.907486] env[69982]: INFO nova.compute.manager [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Migrating [ 1047.916358] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 23.478s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.916574] env[69982]: DEBUG nova.objects.instance [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1048.145656] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Successfully updated port: 53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1048.156160] env[69982]: DEBUG nova.network.neutron [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.172896] env[69982]: DEBUG nova.compute.manager [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Received event network-changed-bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.173645] env[69982]: DEBUG nova.compute.manager [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Refreshing instance network info cache due to event network-changed-bcebda23-660e-4d10-a486-029c8cef42dc. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1048.173927] env[69982]: DEBUG oslo_concurrency.lockutils [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] Acquiring lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.174445] env[69982]: DEBUG oslo_concurrency.lockutils [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] Acquired lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.174445] env[69982]: DEBUG nova.network.neutron [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Refreshing network info cache for port bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1048.296095] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865170, 'name': CreateVM_Task, 'duration_secs': 0.377738} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.296287] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.296994] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.297184] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.297515] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1048.297782] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef88c232-ada6-40f6-9ea7-f336bd0692cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.303336] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1048.303336] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b4e9-7dfc-8284-80c5-6b91834a490c" [ 1048.303336] env[69982]: _type = "Task" [ 1048.303336] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.311742] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b4e9-7dfc-8284-80c5-6b91834a490c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.397218] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865171, 'name': ReconfigVM_Task, 'duration_secs': 0.303472} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.397596] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd/9c0d0f4f-9e88-4e67-99d9-d957652587cd.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.398459] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b9ddfaba-84f9-4542-86d0-a25dbcb25379 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.406767] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1048.406767] env[69982]: value = "task-3865172" [ 1048.406767] env[69982]: _type = "Task" [ 1048.406767] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.415913] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865172, 'name': Rename_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.430928] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.431210] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.431322] env[69982]: DEBUG nova.network.neutron [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1048.648278] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.648463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.648705] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1048.658238] env[69982]: INFO nova.compute.manager [-] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Took 1.36 seconds to deallocate network for instance. 
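The repeated wait_for_task / _poll_task entries in this section come from oslo.vmware's task polling loop (oslo_vmware/api.py, per the file:line trailers above): the driver submits a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, ...) and then re-reads the task's state until it reaches success or error, logging the progress percentage on every pass. The following is only an illustrative sketch of that pattern, not code from oslo.vmware or this log; read_task_info is a hypothetical callable standing in for the PropertyCollector round-trips seen above.

    import time


    def wait_for_task(read_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        read_task_info is assumed to return an object with ``state``
        ('queued' | 'running' | 'success' | 'error'), ``progress`` and
        ``error`` attributes, mirroring the TaskInfo fields logged above.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = read_task_info()
            if info.state == 'success':
                return info                      # e.g. "CopyVirtualDisk_Task ... completed successfully"
            if info.state == 'error':
                raise RuntimeError(info.error)   # oslo.vmware raises the translated task fault here
            # Corresponds to the "Task: {...} progress is N%" debug lines above.
            print('progress is %s%%' % (info.progress or 0))
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %.0fs' % timeout)

In the log, the same loop runs on a fixed task_poll_interval inside the session object, which is why each *_Task above appears several times with increasing progress before its final "completed successfully" entry with a duration_secs value.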
[ 1048.809918] env[69982]: DEBUG nova.compute.manager [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Received event network-vif-plugged-53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.810192] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Acquiring lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.810794] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.810794] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.810794] env[69982]: DEBUG nova.compute.manager [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] No waiting events found dispatching network-vif-plugged-53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1048.810961] env[69982]: WARNING nova.compute.manager [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Received unexpected event network-vif-plugged-53fdfe60-22a6-4234-b170-b003d2f42494 for instance with vm_state building and task_state spawning. [ 1048.811011] env[69982]: DEBUG nova.compute.manager [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Received event network-changed-53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.811154] env[69982]: DEBUG nova.compute.manager [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Refreshing instance network info cache due to event network-changed-53fdfe60-22a6-4234-b170-b003d2f42494. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1048.811317] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Acquiring lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.818299] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5229b4e9-7dfc-8284-80c5-6b91834a490c, 'name': SearchDatastore_Task, 'duration_secs': 0.012034} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.818929] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.818929] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.819074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.819194] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.819375] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.819653] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77f30e93-bea8-41fe-80a3-0a92987dd5ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.829465] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.830180] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.831264] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08af4fe6-377e-48a0-aa9e-68bace49a9da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.837684] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1048.837684] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259ba5d-5dee-7ec0-0454-4ab12ec94435" [ 1048.837684] env[69982]: _type = "Task" [ 1048.837684] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.846769] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259ba5d-5dee-7ec0-0454-4ab12ec94435, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.918436] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865172, 'name': Rename_Task, 'duration_secs': 0.16121} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.918650] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.918942] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c0e199c-3e11-4d63-b267-127bf1a199d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.929606] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1048.929606] env[69982]: value = "task-3865173" [ 1048.929606] env[69982]: _type = "Task" [ 1048.929606] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.930675] env[69982]: DEBUG nova.network.neutron [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Updated VIF entry in instance network info cache for port bcebda23-660e-4d10-a486-029c8cef42dc. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1048.931040] env[69982]: DEBUG nova.network.neutron [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Updating instance_info_cache with network_info: [{"id": "bcebda23-660e-4d10-a486-029c8cef42dc", "address": "fa:16:3e:1d:b3:d5", "network": {"id": "408eb348-084c-45e3-9ea6-26aa0dddabf7", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-836353718-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "17451571a4124ee0b2d674b9bd38c66e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ded18042-834c-4792-b3e8-b1c377446432", "external-id": "nsx-vlan-transportzone-293", "segmentation_id": 293, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcebda23-66", "ovs_interfaceid": "bcebda23-660e-4d10-a486-029c8cef42dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.936688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d46e06a2-6417-4771-b95d-ceb38ca65831 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.940027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.165s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.940235] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.942173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.725s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.944517] env[69982]: INFO nova.compute.claims [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 
71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1048.953809] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865173, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.987710] env[69982]: INFO nova.scheduler.client.report [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted allocations for instance d2684194-a688-4466-9852-1f4ff656f057 [ 1049.166467] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.200902] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1049.224913] env[69982]: DEBUG nova.network.neutron [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.348946] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259ba5d-5dee-7ec0-0454-4ab12ec94435, 'name': SearchDatastore_Task, 'duration_secs': 0.010939} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.352453] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22240905-f8dc-4cde-9cb7-ce4971686d0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.362428] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1049.362428] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52130981-143a-1f33-0f61-d0e953051ff9" [ 1049.362428] env[69982]: _type = "Task" [ 1049.362428] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.372294] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52130981-143a-1f33-0f61-d0e953051ff9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.425718] env[69982]: DEBUG nova.network.neutron [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updating instance_info_cache with network_info: [{"id": "53fdfe60-22a6-4234-b170-b003d2f42494", "address": "fa:16:3e:4b:0b:47", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fdfe60-22", "ovs_interfaceid": "53fdfe60-22a6-4234-b170-b003d2f42494", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.438675] env[69982]: DEBUG oslo_concurrency.lockutils [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] Releasing lock "refresh_cache-00f8efe0-28ad-4d95-b931-a31de0c03bd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.439021] env[69982]: DEBUG nova.compute.manager [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Received event network-vif-deleted-8e419c4b-3285-40f4-972c-96a55d41ddd6 {{(pid=69982) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1049.439211] env[69982]: INFO nova.compute.manager [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Neutron deleted interface 8e419c4b-3285-40f4-972c-96a55d41ddd6; detaching it from the instance and deleting it from the info cache [ 1049.439386] env[69982]: DEBUG nova.network.neutron [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.447746] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865173, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.503489] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5338214c-208f-45d4-bd13-bff84139e8df tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "d2684194-a688-4466-9852-1f4ff656f057" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.370s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.730735] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.874778] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52130981-143a-1f33-0f61-d0e953051ff9, 'name': SearchDatastore_Task, 'duration_secs': 0.012211} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.875106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.875416] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 00f8efe0-28ad-4d95-b931-a31de0c03bd7/00f8efe0-28ad-4d95-b931-a31de0c03bd7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.875781] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-469e4adf-7764-42e3-86e3-b5bd0aaed726 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.884633] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1049.884633] env[69982]: value = "task-3865174" [ 1049.884633] env[69982]: _type = "Task" [ 1049.884633] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.893982] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865174, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.927854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.928281] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Instance network_info: |[{"id": "53fdfe60-22a6-4234-b170-b003d2f42494", "address": "fa:16:3e:4b:0b:47", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fdfe60-22", "ovs_interfaceid": "53fdfe60-22a6-4234-b170-b003d2f42494", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1049.928640] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Acquired lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.928870] env[69982]: DEBUG nova.network.neutron [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Refreshing network info cache for port 53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1049.930314] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:0b:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53fdfe60-22a6-4234-b170-b003d2f42494', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1049.938828] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 
tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.940012] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1049.943840] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-759cc412-25cf-4abb-a527-be06c58c1a3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.964183] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a28596d-b82d-4837-9185-2f6abefc2196 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.973193] env[69982]: DEBUG oslo_vmware.api [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865173, 'name': PowerOnVM_Task, 'duration_secs': 0.631494} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.975677] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.975985] env[69982]: INFO nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Took 7.79 seconds to spawn the instance on the hypervisor. [ 1049.976350] env[69982]: DEBUG nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.976504] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1049.976504] env[69982]: value = "task-3865175" [ 1049.976504] env[69982]: _type = "Task" [ 1049.976504] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.977230] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c65857f-d6c9-4aaa-81af-30a31284357c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.982897] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb076581-7ab4-411a-b016-38002c7df4ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.008650] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865175, 'name': CreateVM_Task} progress is 15%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.031134] env[69982]: DEBUG nova.compute.manager [req-cab48a04-f00e-489d-a30b-d89311bb8317 req-3c845518-9058-496c-b1c8-c7323f83ec3e service nova] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Detach interface failed, port_id=8e419c4b-3285-40f4-972c-96a55d41ddd6, reason: Instance a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1050.054579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.054949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.055215] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.055462] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.055695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.058614] env[69982]: INFO nova.compute.manager [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Terminating instance [ 1050.402967] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865174, 'name': CopyVirtualDisk_Task} progress is 77%. 
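The terminate path above first serializes on per-instance locks (the instance UUID and "uuid-events") through oslo.concurrency's lockutils, which produces the Acquiring/acquired/released lines together with their waited/held timings. A minimal sketch of that locking pattern, using an illustrative lock name; the decorator and context-manager forms are the standard lockutils API, not code taken from nova.

# Minimal sketch of the oslo.concurrency locking pattern visible above.
# The UUID is an illustrative placeholder.
from oslo_concurrency import lockutils

INSTANCE_UUID = '9123b08c-d2ec-4c4d-bade-0acdae75640a'

@lockutils.synchronized(INSTANCE_UUID, 'nova-')
def do_terminate_instance():
    # Held for the whole teardown; a second caller blocks here, and the
    # log records how long it waited and how long the lock was held.
    with lockutils.lock(INSTANCE_UUID + '-events', 'nova-'):
        # clear pending external events for the instance
        pass
    # ... power off, unregister, delete datastore files ...

do_terminate_instance()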
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.413373] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b1817c-83ff-4a99-9799-6c124c381451 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.424883] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c06014f-505d-49f4-8482-d01779164023 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.461038] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9583e0-5298-4ee2-b5dd-2492f0820843 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.469784] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2202c3aa-af9c-4350-b371-35f5b6c8d7df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.484326] env[69982]: DEBUG nova.compute.provider_tree [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.508599] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865175, 'name': CreateVM_Task, 'duration_secs': 0.424636} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.508783] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1050.509455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.509633] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.509967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1050.510262] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a498342-79d4-492d-8248-111eeacbf9d3 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.520453] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1050.520453] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c0eff-80c2-7f74-103c-4fe6131f681f" [ 1050.520453] env[69982]: _type = "Task" [ 1050.520453] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.526270] env[69982]: INFO nova.compute.manager [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Took 33.81 seconds to build instance. [ 1050.533647] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528c0eff-80c2-7f74-103c-4fe6131f681f, 'name': SearchDatastore_Task, 'duration_secs': 0.00997} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.533861] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.534200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.534441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.534595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.534782] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.535067] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ee30712b-fca4-4c66-a2f2-881fe1e0e6c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.544563] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.544764] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.545771] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6666d9c-279f-4d45-b3e1-172da58d3789 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.552029] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1050.552029] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667b1e-b102-fe5d-bf34-145fe82bcca5" [ 1050.552029] env[69982]: _type = "Task" [ 1050.552029] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.560541] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667b1e-b102-fe5d-bf34-145fe82bcca5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.565281] env[69982]: DEBUG nova.compute.manager [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.565550] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.566321] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b191c18b-afa7-481f-9a5e-005f2e30fc65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.576131] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.576476] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6b3cec5-8dae-4866-96a6-5d83115ef801 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.583462] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 1050.583462] env[69982]: value = "task-3865176" [ 1050.583462] env[69982]: _type = "Task" [ 1050.583462] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.598384] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865176, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.782156] env[69982]: DEBUG nova.network.neutron [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updated VIF entry in instance network info cache for port 53fdfe60-22a6-4234-b170-b003d2f42494. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1050.782569] env[69982]: DEBUG nova.network.neutron [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updating instance_info_cache with network_info: [{"id": "53fdfe60-22a6-4234-b170-b003d2f42494", "address": "fa:16:3e:4b:0b:47", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fdfe60-22", "ovs_interfaceid": "53fdfe60-22a6-4234-b170-b003d2f42494", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.899237] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865174, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549205} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.899619] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 00f8efe0-28ad-4d95-b931-a31de0c03bd7/00f8efe0-28ad-4d95-b931-a31de0c03bd7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1050.899989] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1050.900384] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35ba4292-286f-4058-8934-c8a08568693b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.909196] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1050.909196] env[69982]: value = "task-3865177" [ 1050.909196] env[69982]: _type = "Task" [ 1050.909196] env[69982]: } to complete. 
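The network_info cached above for instance 5bbc7b58-3e8e-495f-911a-072d282e48a9 is a list of VIF dictionaries carrying the port id, MAC address, bridge and subnet/fixed-IP details. A small sketch of reading the fixed addresses back out of that structure, using the values shown in the log entry:

# Sketch: extracting fixed IPs from the network_info structure logged above.
# The dict mirrors the cached entry for port 53fdfe60-22a6-4234-b170-b003d2f42494.
network_info = [{
    "id": "53fdfe60-22a6-4234-b170-b003d2f42494",
    "address": "fa:16:3e:4b:0b:47",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1"},
            "ips": [{"address": "192.168.128.13", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        fixed = [ip["address"] for ip in subnet["ips"] if ip["type"] == "fixed"]
        print(vif["id"], vif["address"], subnet["cidr"], fixed)
# -> 53fdfe60-... fa:16:3e:4b:0b:47 192.168.128.0/28 ['192.168.128.13']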
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.922173] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865177, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.988392] env[69982]: DEBUG nova.scheduler.client.report [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.028979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-694e304d-68b5-48ca-abb1-ea1c9c889c3a tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.327s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.062492] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667b1e-b102-fe5d-bf34-145fe82bcca5, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.063473] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c4e5ba-d8e7-4d24-82eb-65b0a4dc7b91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.069544] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1051.069544] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a85cfb-2bed-b3b9-e96d-2d81a0dd5512" [ 1051.069544] env[69982]: _type = "Task" [ 1051.069544] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.078744] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a85cfb-2bed-b3b9-e96d-2d81a0dd5512, 'name': SearchDatastore_Task} progress is 0%. 
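The inventory entry above lists total, reserved and allocation_ratio per resource class. Placement's usable capacity is normally (total - reserved) * allocation_ratio; treating that formula as an assumption rather than something stated in this log, the figures above work out as follows:

# Sketch: effective capacity implied by the inventory logged above,
# assuming the usual Placement formula capacity = (total - reserved) * ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    print(rc, capacity)
# VCPU 192, MEMORY_MB 196078, DISK_GB 400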
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.092994] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865176, 'name': PowerOffVM_Task, 'duration_secs': 0.28249} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.093431] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.093603] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.093900] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2508060d-6d57-4f8b-a91d-7e8911c09e44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.180765] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.181059] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.181250] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleting the datastore file [datastore1] 9123b08c-d2ec-4c4d-bade-0acdae75640a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.181541] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a551bc8-7a87-41cc-8224-f4e3a7349162 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.189205] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for the task: (returnval){ [ 1051.189205] env[69982]: value = "task-3865179" [ 1051.189205] env[69982]: _type = "Task" [ 1051.189205] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.198355] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865179, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.249484] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3254c95-3932-4a48-96c7-2065b9c7972e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.273232] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.285754] env[69982]: DEBUG oslo_concurrency.lockutils [req-69652a0c-8586-4fd5-9355-d09cd1b0b4eb req-655b0e69-c33d-4151-8862-dbd09f415d9a service nova] Releasing lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.419530] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865177, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077904} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.419833] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1051.420712] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e075eec9-0482-4a29-974f-636faf020882 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.444564] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 00f8efe0-28ad-4d95-b931-a31de0c03bd7/00f8efe0-28ad-4d95-b931-a31de0c03bd7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.445117] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8ba7802-4d19-4efe-976d-6b96a915ca42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.469517] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1051.469517] env[69982]: value = "task-3865180" [ 1051.469517] env[69982]: _type = "Task" [ 1051.469517] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.480486] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.493666] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.495054] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1051.497082] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.237s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.499611] env[69982]: INFO nova.compute.claims [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1051.582465] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a85cfb-2bed-b3b9-e96d-2d81a0dd5512, 'name': SearchDatastore_Task, 'duration_secs': 0.048926} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.583172] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.583172] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5bbc7b58-3e8e-495f-911a-072d282e48a9/5bbc7b58-3e8e-495f-911a-072d282e48a9.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1051.583638] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37f51ba8-ecce-4f66-9e5a-a3284bced503 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.590561] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1051.590561] env[69982]: value = "task-3865181" [ 1051.590561] env[69982]: _type = "Task" [ 1051.590561] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.600750] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.636060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.636268] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.701866] env[69982]: DEBUG oslo_vmware.api [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Task: {'id': task-3865179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225312} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.701866] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.701866] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.701866] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.701866] env[69982]: INFO nova.compute.manager [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1051.702180] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.702180] env[69982]: DEBUG nova.compute.manager [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.702180] env[69982]: DEBUG nova.network.neutron [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1051.779401] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.780081] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b24b80ae-bfa6-4cc5-9bc2-d466ee01cf88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.790651] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1051.790651] env[69982]: value = "task-3865182" [ 1051.790651] env[69982]: _type = "Task" [ 1051.790651] env[69982]: } to complete. 
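The "Waiting for function ... to return" lines (here for _deallocate_network_with_retries, earlier for vm_util.create_vm) come from oslo.service's looping-call/retry machinery, which re-invokes a callable when it hits transient failures. A minimal sketch of that pattern with loopingcall.RetryDecorator; the decorated function and exception class are illustrative placeholders, and nova's own wrapping may differ in detail.

# Sketch of the oslo.service retry pattern behind the
# "Waiting for function ... to return" debug lines. The decorated
# function and the exception class are illustrative placeholders.
from oslo_service import loopingcall


class TransientNeutronError(Exception):
    pass


attempts = {'n': 0}


@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5,
                            exceptions=(TransientNeutronError,))
def deallocate_network_with_retries():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise TransientNeutronError('neutron not ready yet')
    return 'deallocated'


print(deallocate_network_with_retries())   # retried twice, then succeeds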
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.800721] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1051.800934] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.984087] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865180, 'name': ReconfigVM_Task, 'duration_secs': 0.375709} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.984443] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 00f8efe0-28ad-4d95-b931-a31de0c03bd7/00f8efe0-28ad-4d95-b931-a31de0c03bd7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.985157] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd03b47a-5563-4d67-99ee-5b2ac11eb696 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.992502] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1051.992502] env[69982]: value = "task-3865183" [ 1051.992502] env[69982]: _type = "Task" [ 1051.992502] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.001488] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865183, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.005086] env[69982]: DEBUG nova.compute.utils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1052.008925] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1052.008925] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1052.081228] env[69982]: DEBUG nova.policy [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbe1397ab59c4dacbfd8418f1b2eaa70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf2351d172d94606a82ec7e5eabc6faa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1052.105351] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865181, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.141204] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1052.303295] env[69982]: DEBUG nova.compute.manager [req-af72c664-e030-43bd-b033-aefdea317391 req-149faac0-3a2a-4d0b-98a1-43be5a20c6fb service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Received event network-vif-deleted-0e9d6238-fb82-48aa-8702-091435aae1b1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.303521] env[69982]: INFO nova.compute.manager [req-af72c664-e030-43bd-b033-aefdea317391 req-149faac0-3a2a-4d0b-98a1-43be5a20c6fb service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Neutron deleted interface 0e9d6238-fb82-48aa-8702-091435aae1b1; detaching it from the instance and deleting it from the info cache [ 1052.303752] env[69982]: DEBUG nova.network.neutron [req-af72c664-e030-43bd-b033-aefdea317391 req-149faac0-3a2a-4d0b-98a1-43be5a20c6fb service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.307174] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1052.307428] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1052.307589] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1052.307770] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1052.307928] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1052.308117] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1052.308331] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1052.308512] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1052.308741] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1052.308864] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1052.309053] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1052.315281] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-257ffb22-a0f6-4f2c-9fcf-e4fb9c63adf9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.334661] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1052.334661] env[69982]: value = "task-3865184" [ 1052.334661] env[69982]: _type = "Task" [ 1052.334661] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.345030] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865184, 'name': ReconfigVM_Task} progress is 6%. 
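The nova.virt.hardware entries above reduce the flavor and image limits to the set of possible CPU topologies; for a single vCPU with unconstrained limits only sockets=1, cores=1, threads=1 survives. A simplified, illustrative enumeration of that idea (not nova's actual implementation, which also orders the results by flavor and image preferences):

# Simplified illustration of enumerating CPU topologies for a vCPU count,
# in the spirit of nova.virt.hardware; this is not nova's code.
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            yield sockets, cores, threads


print(list(possible_topologies(1)))   # [(1, 1, 1)] -- matches the log
print(list(possible_topologies(4)))   # (1, 1, 4), (1, 2, 2), (2, 2, 1), ...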
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.486992] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Successfully created port: 9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1052.504503] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865183, 'name': Rename_Task, 'duration_secs': 0.187521} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.504503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.504931] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-009ca9a6-7a02-4be0-8d96-d032243f59dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.509696] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1052.518342] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1052.518342] env[69982]: value = "task-3865185" [ 1052.518342] env[69982]: _type = "Task" [ 1052.518342] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.530319] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865185, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.608392] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.759223} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.608642] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 5bbc7b58-3e8e-495f-911a-072d282e48a9/5bbc7b58-3e8e-495f-911a-072d282e48a9.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1052.608855] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1052.609125] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e773c378-264c-452c-96a8-02f1ed08df9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.622481] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1052.622481] env[69982]: value = "task-3865186" [ 1052.622481] env[69982]: _type = "Task" [ 1052.622481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.635908] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865186, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.663255] env[69982]: DEBUG nova.network.neutron [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.667730] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.815766] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99eec1cd-02d2-462d-901f-542113efb99e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.827047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7864da0c-aa92-45eb-b7dd-bdeea1a27515 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.856381] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865184, 'name': ReconfigVM_Task, 'duration_secs': 0.189396} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.856381] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1052.879702] env[69982]: DEBUG nova.compute.manager [req-af72c664-e030-43bd-b033-aefdea317391 req-149faac0-3a2a-4d0b-98a1-43be5a20c6fb service nova] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Detach interface failed, port_id=0e9d6238-fb82-48aa-8702-091435aae1b1, reason: Instance 9123b08c-d2ec-4c4d-bade-0acdae75640a could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1052.991153] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55759e0f-b66b-426c-8b67-45e02323fb8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.000372] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb2ded5-4daa-4b23-b270-4ae247945b18 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.051934] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67725a3b-c1c7-4944-bdb5-5b56ecf4ad16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.063621] env[69982]: DEBUG oslo_vmware.api [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865185, 'name': PowerOnVM_Task, 'duration_secs': 0.542009} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.069472] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.069472] env[69982]: INFO nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Took 8.34 seconds to spawn the instance on the hypervisor. [ 1053.069472] env[69982]: DEBUG nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.069472] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b2e20f-e7ab-4901-b0b5-ed4a627fe221 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.072655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49562d08-be01-4ab6-9ee5-4fd7b18f222c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.089416] env[69982]: DEBUG nova.compute.provider_tree [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.139280] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090712} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.139622] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1053.140535] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e667052b-5028-40a7-855c-849c77d8e00a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.171669] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 5bbc7b58-3e8e-495f-911a-072d282e48a9/5bbc7b58-3e8e-495f-911a-072d282e48a9.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.171669] env[69982]: INFO nova.compute.manager [-] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Took 1.47 seconds to deallocate network for instance. [ 1053.171669] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a49c7488-e875-47e3-9105-9fce3557451c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.205495] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1053.205495] env[69982]: value = "task-3865187" [ 1053.205495] env[69982]: _type = "Task" [ 1053.205495] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.215493] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865187, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.362588] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.362888] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.363082] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.363292] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.363441] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.363588] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.363885] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.363991] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.364128] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] 
Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.364269] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.364447] env[69982]: DEBUG nova.virt.hardware [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.369854] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1053.370629] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e070d877-5beb-4f85-acf2-dacc8e1c0528 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.389733] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1053.389733] env[69982]: value = "task-3865188" [ 1053.389733] env[69982]: _type = "Task" [ 1053.389733] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.399154] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865188, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.546666] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.546974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.547256] env[69982]: INFO nova.compute.manager [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Shelving [ 1053.556427] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1053.583374] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1053.584113] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1053.584113] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1053.584113] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor pref 0:0:0 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1053.584113] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1053.584275] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1053.585024] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1053.585024] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1053.585024] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1053.585024] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1053.585233] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1053.586387] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d151e51-fc4a-49bc-8498-2ff09a1f1af9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.596783] env[69982]: DEBUG nova.scheduler.client.report [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
1053.607036] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26b657b-aafe-42a2-b8b6-500104168272 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.613429] env[69982]: INFO nova.compute.manager [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Took 35.91 seconds to build instance. [ 1053.699183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.717332] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865187, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.902726] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865188, 'name': ReconfigVM_Task, 'duration_secs': 0.193722} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.903116] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1053.904024] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a048d960-3148-4878-93f6-e32bd18b14b8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.929563] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1053.929958] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20a8d6e0-3e93-4518-aeca-a043504ca1d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.949468] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1053.949468] env[69982]: value = "task-3865189" [ 1053.949468] env[69982]: _type = "Task" [ 1053.949468] env[69982]: } to 
complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.958670] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865189, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.110286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.610s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.110286] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1054.113615] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.376s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.114266] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.119320] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.830s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.121037] env[69982]: INFO nova.compute.claims [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1054.124388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ee922536-667f-40de-8a66-05409e0b183a tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.428s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.154848] env[69982]: INFO nova.scheduler.client.report [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Deleted 
allocations for instance 3edfba43-55e9-4180-bb03-ce008af3a7d7 [ 1054.218139] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865187, 'name': ReconfigVM_Task, 'duration_secs': 0.917558} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.219140] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 5bbc7b58-3e8e-495f-911a-072d282e48a9/5bbc7b58-3e8e-495f-911a-072d282e48a9.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.219865] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3242e410-355e-4f64-88b5-7ff209bd9223 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.229273] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1054.229273] env[69982]: value = "task-3865190" [ 1054.229273] env[69982]: _type = "Task" [ 1054.229273] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.240798] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865190, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.462584] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865189, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.470278] env[69982]: DEBUG nova.compute.manager [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Received event network-vif-plugged-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1054.470589] env[69982]: DEBUG oslo_concurrency.lockutils [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] Acquiring lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.470819] env[69982]: DEBUG oslo_concurrency.lockutils [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.471060] env[69982]: DEBUG oslo_concurrency.lockutils [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.471247] env[69982]: DEBUG nova.compute.manager [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] No waiting events found dispatching network-vif-plugged-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1054.471415] env[69982]: WARNING nova.compute.manager [req-4fa53d7c-ce0f-41b0-be3a-d881f66f540b req-e4c6a945-43c5-4a3f-8e3f-a94d879f149d service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Received unexpected event network-vif-plugged-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 for instance with vm_state building and task_state spawning. 
[ 1054.519208] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Successfully updated port: 9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1054.557569] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1054.558053] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4865b6f4-a8ce-4045-8760-206a595cd3f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.565444] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1054.565444] env[69982]: value = "task-3865191" [ 1054.565444] env[69982]: _type = "Task" [ 1054.565444] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.576139] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.629026] env[69982]: DEBUG nova.compute.utils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1054.632338] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1054.634058] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1054.670193] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57277395-458d-454e-ad9e-c1228de70c63 tempest-ImagesNegativeTestJSON-1648597849 tempest-ImagesNegativeTestJSON-1648597849-project-member] Lock "3edfba43-55e9-4180-bb03-ce008af3a7d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.097s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.686229] env[69982]: DEBUG nova.policy [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbe1397ab59c4dacbfd8418f1b2eaa70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf2351d172d94606a82ec7e5eabc6faa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1054.741732] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865190, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.962610] env[69982]: DEBUG oslo_vmware.api [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865189, 'name': ReconfigVM_Task, 'duration_secs': 0.982224} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.962935] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Reconfigured VM instance instance-0000003d to attach disk [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00/d21659fd-015d-4f5b-b4b5-f38f550e0f00.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1054.963287] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.021784] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.021983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.022210] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1055.043435] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Successfully created port: 099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1055.076405] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865191, 'name': PowerOffVM_Task, 'duration_secs': 0.341074} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.076830] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.077540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a22b5fe-fb51-4838-915b-b0aeeb06dbae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.097265] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fa9190-63e2-4beb-8b38-ce6dddf311ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.137584] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1055.244551] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865190, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.285392] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.285917] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.286311] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.287223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} 
[ 1055.287552] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.291514] env[69982]: INFO nova.compute.manager [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Terminating instance [ 1055.476864] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef30dec5-f9b6-494b-9ac7-dd65175cbef4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.501119] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3527e8-e150-40e3-83e8-70cec8cfadd9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.520831] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1055.564967] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0193bbb-71e7-4a61-9669-5f387da242cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.570684] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64c37f-e50a-4e4f-b365-0015098e8323 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.577926] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1055.610398] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1055.613343] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1b29187e-5acd-47c1-bc16-86800f3c5b08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.616503] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c82757b-b7e8-4dc9-b2bc-ed37692ae69c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.629050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f928e512-2ce3-4de0-a659-2d67bd81d599 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.635156] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1055.635156] env[69982]: value = "task-3865192" [ 1055.635156] env[69982]: _type = "Task" [ 1055.635156] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.651084] env[69982]: DEBUG nova.compute.provider_tree [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.661828] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865192, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.743770] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865190, 'name': Rename_Task, 'duration_secs': 1.162514} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.743938] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.746255] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30b5840d-12d4-4afd-bc70-e1314654c8bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.752443] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1055.752443] env[69982]: value = "task-3865193" [ 1055.752443] env[69982]: _type = "Task" [ 1055.752443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.761105] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.798701] env[69982]: DEBUG nova.compute.manager [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.798983] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.799961] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba67713-febc-46f3-9bd8-1c8f578f4745 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.808953] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.809296] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf44e87a-e414-40b8-a281-e591524e43a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.817738] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1055.817738] env[69982]: value = "task-3865194" [ 1055.817738] env[69982]: _type = "Task" [ 1055.817738] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.828402] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865194, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.838272] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Updating instance_info_cache with network_info: [{"id": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "address": "fa:16:3e:dd:c4:ca", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9206fa5a-6a", "ovs_interfaceid": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.081232] env[69982]: DEBUG nova.network.neutron [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Port cc682899-b104-4e53-b80d-49a30d6e0316 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1056.150422] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865192, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.158640] env[69982]: DEBUG nova.scheduler.client.report [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.163660] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1056.208594] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1056.209019] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1056.209184] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1056.209440] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1056.209644] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1056.209869] env[69982]: DEBUG 
nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1056.210170] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1056.210393] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1056.210708] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1056.210926] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1056.211169] env[69982]: DEBUG nova.virt.hardware [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1056.212141] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb30472-dc3b-4e53-8dcb-0399506f59af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.224697] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d37bec5-3848-40b2-a0f8-8c2913672a93 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.266488] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865193, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.332643] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865194, 'name': PowerOffVM_Task, 'duration_secs': 0.179405} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.333035] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.333270] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.333604] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10b5fdb6-c483-4462-b824-1a2020ff3bcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.341314] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.341721] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Instance network_info: |[{"id": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "address": "fa:16:3e:dd:c4:ca", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9206fa5a-6a", "ovs_interfaceid": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1056.342258] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:c4:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9206fa5a-6a33-4a8f-aeee-827c3f1d0b46', 'vif_model': 'vmxnet3'}] 
{{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.352381] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating folder: Project (bf2351d172d94606a82ec7e5eabc6faa). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1056.352381] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0cfa2cd-23c1-49f9-aedc-1ce9753ea178 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.366294] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created folder: Project (bf2351d172d94606a82ec7e5eabc6faa) in parent group-v767796. [ 1056.366294] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating folder: Instances. Parent ref: group-v768049. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1056.366294] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e391de76-0bc2-435b-adf7-3762fdc1f47b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.382092] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created folder: Instances in parent group-v768049. [ 1056.382245] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.382656] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.383020] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dd186c6-01dd-4be4-b8c1-db4ad1ff1cef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.408158] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.408158] env[69982]: value = "task-3865198" [ 1056.408158] env[69982]: _type = "Task" [ 1056.408158] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.422267] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865198, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.433061] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.433478] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.433765] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Deleting the datastore file [datastore2] 00f8efe0-28ad-4d95-b931-a31de0c03bd7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.434535] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-415a7e1c-957c-4abf-864e-532e9ef3d81d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.447075] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for the task: (returnval){ [ 1056.447075] env[69982]: value = "task-3865199" [ 1056.447075] env[69982]: _type = "Task" [ 1056.447075] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.462622] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865199, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.513029] env[69982]: DEBUG nova.compute.manager [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Received event network-changed-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.513029] env[69982]: DEBUG nova.compute.manager [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Refreshing instance network info cache due to event network-changed-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1056.513029] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] Acquiring lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.513029] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] Acquired lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.513029] env[69982]: DEBUG nova.network.neutron [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Refreshing network info cache for port 9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1056.650276] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865192, 'name': CreateSnapshot_Task, 'duration_secs': 0.72267} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.650276] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1056.650276] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6811bf81-e0e3-479c-b240-f12c5ef70d37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.669628] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.669628] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1056.670798] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.586s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.671034] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.674922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 24.772s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.708433] env[69982]: INFO nova.scheduler.client.report [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted allocations for instance 69103bad-cb3f-4cd1-bfa1-c19b10395674 [ 1056.768865] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865193, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.920799] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865198, 'name': CreateVM_Task, 'duration_secs': 0.458328} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.920799] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1056.920799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.920799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.924353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1056.924353] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77f439cb-7ecb-4c70-afcf-fd24c59f5084 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.927548] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1056.927548] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c62fe2-b324-9992-5286-ab5e8beaa0bd" [ 1056.927548] env[69982]: _type = "Task" [ 1056.927548] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.938123] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c62fe2-b324-9992-5286-ab5e8beaa0bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.957717] env[69982]: DEBUG oslo_vmware.api [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Task: {'id': task-3865199, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25559} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.959456] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Successfully updated port: 099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1056.959920] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.960045] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.960313] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.960397] env[69982]: INFO nova.compute.manager [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1056.960640] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.961186] env[69982]: DEBUG nova.compute.manager [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.961369] env[69982]: DEBUG nova.network.neutron [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1057.103261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.103891] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.103891] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.173457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1057.174233] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f6b3541b-9226-4673-8f77-de4119578a0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.179293] env[69982]: DEBUG nova.compute.utils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1057.185423] env[69982]: INFO nova.compute.claims [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.192517] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1057.196775] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1057.198169] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1057.211639] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1057.211639] env[69982]: value = "task-3865200" [ 1057.211639] env[69982]: _type = "Task" [ 1057.211639] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.229913] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865200, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.230691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b8cd95fc-c1c1-4396-b47d-a445e57f1df3 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "69103bad-cb3f-4cd1-bfa1-c19b10395674" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.719s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.264911] env[69982]: DEBUG oslo_vmware.api [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865193, 'name': PowerOnVM_Task, 'duration_secs': 1.303928} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.265199] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.265404] env[69982]: INFO nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Took 9.90 seconds to spawn the instance on the hypervisor. 
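The PowerOnVM_Task entries above (task started, "progress is N%", "completed successfully") are produced by oslo.vmware's task-polling helper, which Nova's vmwareapi driver drives for every vCenter task in this log. A minimal sketch of that pattern follows; it is illustrative only and not taken from this run — the vCenter host, credentials, and the 'vm-12345' managed-object ID are placeholders.

    # Minimal sketch of the oslo.vmware pattern behind the *_Task entries above:
    # invoke_api() starts a vCenter task and wait_for_task() polls it, emitting the
    # "Waiting for the task", "progress is N%" and "completed successfully" lines
    # seen in this log (wait_for_task / _poll_task in oslo_vmware/api.py).
    # Host, credentials and the managed-object ID below are placeholders.
    from oslo_vmware import api, vim_util

    # Establishes a session against the vCenter SDK endpoint on construction.
    session = api.VMwareAPISession(
        'vcenter.example.test',           # placeholder vCenter host
        'administrator@vsphere.local',    # placeholder username
        'secret',                         # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)           # seconds between poll iterations

    # Build a reference to an existing VM and power it on, blocking until the
    # task completes (wait_for_task raises if vCenter reports the task failed).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)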
[ 1057.265585] env[69982]: DEBUG nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.266538] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e98ab8-13c0-4f39-b555-64870be2c69d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.345700] env[69982]: DEBUG nova.policy [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '493da3ee04094ba4ac17893d999ac99e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9cc588ded27b49d4826535649105aa88', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1057.439246] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c62fe2-b324-9992-5286-ab5e8beaa0bd, 'name': SearchDatastore_Task, 'duration_secs': 0.01342} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.442681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.442681] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.442681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.442681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.442970] env[69982]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.442970] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63f3c958-7802-4874-a03d-2cf581daefbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.454695] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.454695] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.454695] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f1df47b-ee2d-4d9c-a049-09bb201db5c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.462034] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.462225] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.462426] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1057.464333] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1057.464333] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525585b2-dbe8-50ba-1d5c-1f73db151c1d" [ 1057.464333] env[69982]: _type = "Task" [ 1057.464333] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.478023] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525585b2-dbe8-50ba-1d5c-1f73db151c1d, 'name': SearchDatastore_Task, 'duration_secs': 0.012358} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.478980] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09892731-d956-4843-b5b1-7898bffc285f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.487059] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1057.487059] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f23c2a-9d60-a0d5-0ddd-cdbc0a7e3f71" [ 1057.487059] env[69982]: _type = "Task" [ 1057.487059] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.499770] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f23c2a-9d60-a0d5-0ddd-cdbc0a7e3f71, 'name': SearchDatastore_Task, 'duration_secs': 0.010144} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.500141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.500533] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a/71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1057.502055] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c50af123-94b9-4ccc-ae53-936c65e2108d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.512024] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1057.512024] env[69982]: value = "task-3865201" [ 1057.512024] env[69982]: _type = "Task" [ 1057.512024] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.514888] env[69982]: DEBUG nova.network.neutron [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Updated VIF entry in instance network info cache for port 9206fa5a-6a33-4a8f-aeee-827c3f1d0b46. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1057.515415] env[69982]: DEBUG nova.network.neutron [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Updating instance_info_cache with network_info: [{"id": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "address": "fa:16:3e:dd:c4:ca", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9206fa5a-6a", "ovs_interfaceid": "9206fa5a-6a33-4a8f-aeee-827c3f1d0b46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.522512] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865201, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.703981] env[69982]: INFO nova.compute.resource_tracker [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating resource usage from migration 0520604d-a406-402f-9686-aee9f2ea548e [ 1057.732242] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865200, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.757662] env[69982]: DEBUG nova.network.neutron [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.796910] env[69982]: INFO nova.compute.manager [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Took 36.37 seconds to build instance. [ 1058.020292] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Successfully created port: 7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.025364] env[69982]: DEBUG oslo_concurrency.lockutils [req-bfd214b9-4099-4e16-b249-e3cec4051401 req-8b0bd08e-ecf7-4af1-b95a-8c3e6da985b9 service nova] Releasing lock "refresh_cache-71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.034403] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865201, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.067796] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1058.192227] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.192523] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.193993] env[69982]: DEBUG nova.network.neutron [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1058.212973] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1058.227999] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865200, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.257361] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1058.258297] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1058.258297] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1058.258297] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1058.258297] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1058.258297] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1058.258538] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1058.258641] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 
tempest-ImagesTestJSON-532780232-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1058.258804] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1058.258961] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1058.259150] env[69982]: DEBUG nova.virt.hardware [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1058.259628] env[69982]: INFO nova.compute.manager [-] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Took 1.30 seconds to deallocate network for instance. [ 1058.263018] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7315a24c-8d4a-426a-ad0c-719099afb5ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.288669] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab31755-1176-4d82-9941-fbd6e8232a23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.305781] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59cc87e3-a72a-4dd3-897d-33e412950b5d tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.890s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.322454] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4874a144-65de-4d98-805c-7c8288f900f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.332285] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f08e3df-a2a9-422b-b831-bc97cd30ad50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.368237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac805d39-f279-4094-bf68-bf678219215e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.377280] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0b1de4-82fe-470e-813d-eb8d099c28e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.392639] env[69982]: DEBUG nova.compute.provider_tree [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e 
tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.474892] env[69982]: DEBUG nova.network.neutron [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Updating instance_info_cache with network_info: [{"id": "099b128a-c69d-49a3-8dea-548476244e6f", "address": "fa:16:3e:71:bd:bf", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099b128a-c6", "ovs_interfaceid": "099b128a-c69d-49a3-8dea-548476244e6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.522642] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.555059} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.522939] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a/71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.523459] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.523699] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3e4d050-556a-418b-9e80-12ad4f1116c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.531755] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1058.531755] env[69982]: value = "task-3865202" [ 1058.531755] env[69982]: _type = "Task" [ 1058.531755] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.547247] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865202, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.626237] env[69982]: DEBUG nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Received event network-vif-plugged-099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.626484] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Acquiring lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.627202] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.627202] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.627202] env[69982]: DEBUG nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] No waiting events found dispatching network-vif-plugged-099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1058.627202] env[69982]: WARNING nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Received unexpected event network-vif-plugged-099b128a-c69d-49a3-8dea-548476244e6f for instance with vm_state building and task_state spawning. [ 1058.627548] env[69982]: DEBUG nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Received event network-changed-099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.627548] env[69982]: DEBUG nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Refreshing instance network info cache due to event network-changed-099b128a-c69d-49a3-8dea-548476244e6f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.627704] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Acquiring lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.731316] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865200, 'name': CloneVM_Task, 'duration_secs': 1.345606} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.731502] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Created linked-clone VM from snapshot [ 1058.732323] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3400894-c321-4482-94d9-d9081743f88d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.740600] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Uploading image 07979a20-baf7-482a-918a-853610d09226 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1058.767623] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1058.767623] env[69982]: value = "vm-768052" [ 1058.767623] env[69982]: _type = "VirtualMachine" [ 1058.767623] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1058.768268] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ac3d1d7a-f682-4eb9-a504-45a5585f977b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.778593] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lease: (returnval){ [ 1058.778593] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205edb8-b1f1-e11c-b967-abb981b1a2db" [ 1058.778593] env[69982]: _type = "HttpNfcLease" [ 1058.778593] env[69982]: } obtained for exporting VM: (result){ [ 1058.778593] env[69982]: value = "vm-768052" [ 1058.778593] env[69982]: _type = "VirtualMachine" [ 1058.778593] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1058.778593] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the lease: (returnval){ [ 1058.778593] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205edb8-b1f1-e11c-b967-abb981b1a2db" [ 1058.778593] env[69982]: _type = "HttpNfcLease" [ 1058.778593] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1058.785241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.788799] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1058.788799] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205edb8-b1f1-e11c-b967-abb981b1a2db" [ 1058.788799] env[69982]: _type = "HttpNfcLease" [ 1058.788799] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1058.895841] env[69982]: DEBUG nova.scheduler.client.report [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.977537] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.977920] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Instance network_info: |[{"id": "099b128a-c69d-49a3-8dea-548476244e6f", "address": "fa:16:3e:71:bd:bf", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099b128a-c6", "ovs_interfaceid": "099b128a-c69d-49a3-8dea-548476244e6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1058.978355] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Acquired lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.978576] env[69982]: DEBUG nova.network.neutron [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Refreshing network info cache for port 099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1058.984803] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:bd:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '099b128a-c69d-49a3-8dea-548476244e6f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.000199] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.001328] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.001577] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-707f6019-f58f-41ad-89f8-b28beab9318b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.023193] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.023193] env[69982]: value = "task-3865204" [ 1059.023193] env[69982]: _type = "Task" [ 1059.023193] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.033982] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865204, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.046994] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865202, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083265} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.047411] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1059.048944] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4f8f08-0a31-4044-a6ab-2ce6ef1164eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.075221] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a/71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.075856] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd91e323-0dc3-4806-be09-123e4cc5da15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.099137] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1059.099137] env[69982]: value = "task-3865205" [ 1059.099137] env[69982]: _type = "Task" [ 1059.099137] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.112684] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865205, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.256887] env[69982]: DEBUG nova.network.neutron [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.288526] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1059.288526] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205edb8-b1f1-e11c-b967-abb981b1a2db" [ 1059.288526] env[69982]: _type = "HttpNfcLease" [ 1059.288526] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1059.288878] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1059.288878] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5205edb8-b1f1-e11c-b967-abb981b1a2db" [ 1059.288878] env[69982]: _type = "HttpNfcLease" [ 1059.288878] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1059.289818] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f377a50-de2a-4018-ba3d-26599d867754 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.303991] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk from lease info. 
{{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1059.303991] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1059.401236] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.728s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.401462] env[69982]: INFO nova.compute.manager [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Migrating [ 1059.408764] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.048s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.408967] env[69982]: DEBUG nova.objects.instance [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1059.478257] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f923d9df-9007-4419-9154-7fd753232c2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.540026] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865204, 'name': CreateVM_Task, 'duration_secs': 0.372977} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.540026] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1059.540026] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.540026] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.540026] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1059.540026] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7725f07-ce4d-492e-a6cd-ec1e44a1db6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.547576] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1059.547576] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521f5df1-8e12-dcce-1d91-e748d15f812d" [ 1059.547576] env[69982]: _type = "Task" [ 1059.547576] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.556876] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521f5df1-8e12-dcce-1d91-e748d15f812d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.610813] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865205, 'name': ReconfigVM_Task, 'duration_secs': 0.484121} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.611197] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a/71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1059.612059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dac2d0a1-1116-4824-b479-b26cbc996c5e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.620147] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1059.620147] env[69982]: value = "task-3865206" [ 1059.620147] env[69982]: _type = "Task" [ 1059.620147] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.636278] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865206, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.760597] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.922683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.923060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.923196] env[69982]: DEBUG nova.network.neutron [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1060.038263] env[69982]: DEBUG nova.network.neutron [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Updated VIF entry in instance network info cache for port 099b128a-c69d-49a3-8dea-548476244e6f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1060.040792] env[69982]: DEBUG nova.network.neutron [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Updating instance_info_cache with network_info: [{"id": "099b128a-c69d-49a3-8dea-548476244e6f", "address": "fa:16:3e:71:bd:bf", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap099b128a-c6", "ovs_interfaceid": "099b128a-c69d-49a3-8dea-548476244e6f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.059923] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521f5df1-8e12-dcce-1d91-e748d15f812d, 'name': SearchDatastore_Task, 'duration_secs': 0.016726} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.060400] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.061248] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.061248] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.061248] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.061822] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.061822] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7854249c-1b2a-484b-8d91-f79604975677 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.071170] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.071294] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.072091] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1154694-dd00-447d-a327-eca16fa226e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.078472] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1060.078472] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272243f-0b5b-3680-95f1-8fe94d88190a" [ 1060.078472] env[69982]: _type = "Task" [ 1060.078472] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.088810] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272243f-0b5b-3680-95f1-8fe94d88190a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.132452] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865206, 'name': Rename_Task, 'duration_secs': 0.286241} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.133187] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.133658] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bb356bb-e3f2-4c0e-9f4b-9d0aa037e19e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.141809] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1060.141809] env[69982]: value = "task-3865207" [ 1060.141809] env[69982]: _type = "Task" [ 1060.141809] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.154536] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865207, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.170290] env[69982]: DEBUG nova.compute.manager [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Received event network-vif-plugged-7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.170290] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] Acquiring lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.170290] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.170290] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.170290] env[69982]: DEBUG nova.compute.manager [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] No waiting events found dispatching network-vif-plugged-7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1060.170653] env[69982]: WARNING nova.compute.manager [req-b1a46467-cbcb-47b0-896c-d9b07ba91624 req-3bfe882a-debb-4a8c-b742-bf0c34d861f6 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Received unexpected event network-vif-plugged-7d9d4676-21b3-4951-a9a4-21f80d05edb0 for instance with vm_state building and task_state spawning. 
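Editor's note: the repeated "Acquiring lock ... by ...", "Lock ... acquired ... :: waited N.NNNs" and "... \"released\" ... :: held N.NNNs" triplets in the entries above come from oslo.concurrency's lockutils wrappers. The following is a minimal sketch (not Nova source) of that pattern; the lock names are copied from the log for context, but the decorated function and the context-manager body are illustrative placeholders.

from oslo_concurrency import lockutils

# Decorator form: the whole function body runs under the named lock, and
# lockutils emits the "acquired by ... waited" / "released ... held" DEBUG
# lines around it (the ones referencing lockutils.py inner()).
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # placeholder: resource-tracker style bookkeeping would go here

# Context-manager form, as used for the per-instance "refresh_cache-<uuid>"
# and "<uuid>-events" locks seen above (the lockutils.py lock() lines).
with lockutils.lock('refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb'):
    pass  # placeholder: refresh the instance's network info cache here

Both forms take an in-process lock by default; passing external=True additionally takes a file-based lock so separate processes serialize on the same name.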
[ 1060.298052] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe679385-eef9-433e-9999-1f90fe0c39d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.327104] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d55fc7e-341b-4a36-b7cd-bfb033506944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.337726] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.427392] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d362415b-a5a0-4514-9d1c-ac8520f39d88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.436443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.680s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.438249] env[69982]: INFO nova.compute.claims [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1060.509680] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Successfully updated port: 7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.541467] env[69982]: DEBUG oslo_concurrency.lockutils [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] Releasing lock "refresh_cache-0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.541717] env[69982]: DEBUG nova.compute.manager [req-49a2d8bd-779c-4a4b-b7a4-8a51d0c05a11 req-614115b6-43b0-4ca2-92ab-688aef5214a8 service nova] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Received event network-vif-deleted-bcebda23-660e-4d10-a486-029c8cef42dc {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.591411] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5272243f-0b5b-3680-95f1-8fe94d88190a, 'name': SearchDatastore_Task, 'duration_secs': 0.01011} 
completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.592290] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b9c5ac2-a0b1-4f7c-aaaa-929e47309d90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.598410] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1060.598410] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529957f5-9ce2-2571-823c-6719a9c6dd6d" [ 1060.598410] env[69982]: _type = "Task" [ 1060.598410] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.608599] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529957f5-9ce2-2571-823c-6719a9c6dd6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.654910] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865207, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.846324] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8cf0f84c-7a68-42ca-a27b-c83e1d4bb111 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance 'd21659fd-015d-4f5b-b4b5-f38f550e0f00' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1060.855025] env[69982]: DEBUG nova.compute.manager [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Received event network-changed-53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.855025] env[69982]: DEBUG nova.compute.manager [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Refreshing instance network info cache due to event network-changed-53fdfe60-22a6-4234-b170-b003d2f42494. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1060.855286] env[69982]: DEBUG oslo_concurrency.lockutils [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] Acquiring lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.855482] env[69982]: DEBUG oslo_concurrency.lockutils [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] Acquired lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.855726] env[69982]: DEBUG nova.network.neutron [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Refreshing network info cache for port 53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1060.904562] env[69982]: DEBUG nova.network.neutron [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [{"id": "35894e50-7421-402a-91f6-e5a640cebd85", "address": "fa:16:3e:fe:6f:b0", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35894e50-74", "ovs_interfaceid": "35894e50-7421-402a-91f6-e5a640cebd85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.009157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.009331] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.009493] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 
tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1061.114427] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529957f5-9ce2-2571-823c-6719a9c6dd6d, 'name': SearchDatastore_Task, 'duration_secs': 0.010347} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.114924] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.115284] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb/0b973aa8-6d25-4de9-8a6b-7bb9f65671fb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.115912] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5eb0973a-817d-42e2-9b16-5f98fb05da37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.125452] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1061.125452] env[69982]: value = "task-3865208" [ 1061.125452] env[69982]: _type = "Task" [ 1061.125452] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.135865] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865208, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.154767] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865207, 'name': PowerOnVM_Task, 'duration_secs': 0.781536} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.155168] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.155453] env[69982]: INFO nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1061.155700] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.156672] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d34689-d964-4668-9e55-da553d6f7c65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.331396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.331938] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.407964] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.568295] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1061.640432] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865208, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.683422] env[69982]: INFO nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Took 33.50 seconds to build instance. [ 1061.778374] env[69982]: DEBUG nova.network.neutron [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updated VIF entry in instance network info cache for port 53fdfe60-22a6-4234-b170-b003d2f42494. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1061.778785] env[69982]: DEBUG nova.network.neutron [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updating instance_info_cache with network_info: [{"id": "53fdfe60-22a6-4234-b170-b003d2f42494", "address": "fa:16:3e:4b:0b:47", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53fdfe60-22", "ovs_interfaceid": "53fdfe60-22a6-4234-b170-b003d2f42494", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.837146] env[69982]: INFO nova.compute.manager [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Detaching volume 44d0e079-95f9-4407-b06b-9c0c5646922a [ 1061.854892] env[69982]: DEBUG nova.network.neutron [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Updating instance_info_cache with network_info: [{"id": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "address": "fa:16:3e:49:11:aa", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9d4676-21", "ovs_interfaceid": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.896143] env[69982]: INFO nova.virt.block_device [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Attempting to driver detach volume 44d0e079-95f9-4407-b06b-9c0c5646922a from mountpoint /dev/sdb [ 1061.896143] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1061.896143] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1061.896345] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb17422-04e0-4cec-a292-b00e7b2f24af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.934815] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb239ae0-323f-4832-a115-808c6a037160 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.946835] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d858d81-4919-4699-91b4-c7cb7033e32d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.953278] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcfcfa7-fec4-469c-9597-7684dfb1de7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.978482] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b7b05c-4246-45b7-88a8-4be328839353 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.985981] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad503753-12df-4e0d-b244-efe5f1dd40dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.005025] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] The volume has not been displaced from its original location: [datastore2] volume-44d0e079-95f9-4407-b06b-9c0c5646922a/volume-44d0e079-95f9-4407-b06b-9c0c5646922a.vmdk. No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1062.007986] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfiguring VM instance instance-00000049 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1062.009176] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-deb1db8e-c055-4a6c-aad3-c6d60baf53c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.050516] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41604caa-c697-4ad9-b85f-2e65baeb4367 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.055539] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1062.055539] env[69982]: value = "task-3865209" [ 1062.055539] env[69982]: _type = "Task" [ 1062.055539] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.062347] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a398ed-db06-4c35-a7a6-c909465e29c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.083292] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.083663] env[69982]: DEBUG nova.compute.provider_tree [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.139243] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865208, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524804} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.139576] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb/0b973aa8-6d25-4de9-8a6b-7bb9f65671fb.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.142039] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.142039] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef39aa0e-4379-4ab3-a544-e1837a32f53f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.149078] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1062.149078] env[69982]: value = "task-3865210" [ 1062.149078] env[69982]: _type = "Task" [ 1062.149078] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.159843] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865210, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.186332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.282662] env[69982]: DEBUG oslo_concurrency.lockutils [req-0d8ef615-bcf2-43e6-b26b-e704ee1158aa req-90f0d5c9-3226-4f60-b5f7-e9dcda3a2dfe service nova] Releasing lock "refresh_cache-5bbc7b58-3e8e-495f-911a-072d282e48a9" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.352343] env[69982]: DEBUG nova.compute.manager [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Received event network-changed-7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1062.352588] env[69982]: DEBUG nova.compute.manager [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Refreshing instance network info cache due to event network-changed-7d9d4676-21b3-4951-a9a4-21f80d05edb0. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1062.352681] env[69982]: DEBUG oslo_concurrency.lockutils [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] Acquiring lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.359393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.359722] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Instance network_info: |[{"id": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "address": "fa:16:3e:49:11:aa", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap7d9d4676-21", "ovs_interfaceid": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1062.360294] env[69982]: DEBUG oslo_concurrency.lockutils [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] Acquired lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.360525] env[69982]: DEBUG nova.network.neutron [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Refreshing network info cache for port 7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1062.361887] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:11:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e31a7f15-a808-4199-9071-31fd05e316ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d9d4676-21b3-4951-a9a4-21f80d05edb0', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.369527] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.370287] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.370561] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4eb15a9b-c8b0-4d97-a91b-0221c525b3a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.400085] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.400085] env[69982]: value = "task-3865211" [ 1062.400085] env[69982]: _type = "Task" [ 1062.400085] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.423795] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865211, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.569767] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865209, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.586982] env[69982]: DEBUG nova.scheduler.client.report [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.660248] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12575} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.660519] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.661496] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e556306-1aa2-4cfa-b8ee-25e0369ee567 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.686939] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb/0b973aa8-6d25-4de9-8a6b-7bb9f65671fb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1062.687416] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37b6f553-6c04-40ab-8e20-3d894aff06dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.708137] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1062.708137] env[69982]: value = "task-3865212" [ 1062.708137] env[69982]: _type = "Task" [ 1062.708137] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.721746] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865212, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.919259] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865211, 'name': CreateVM_Task, 'duration_secs': 0.48323} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.919814] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.920167] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.920414] env[69982]: DEBUG nova.compute.manager [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Going to confirm migration 4 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1062.921821] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.927465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.927901] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.928287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.928688] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b63bafc3-d22b-40d6-935e-765ab1621dc3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.935139] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1062.935139] env[69982]: value = 
"session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5243afee-d4a1-4600-e650-650c64859085" [ 1062.935139] env[69982]: _type = "Task" [ 1062.935139] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.943306] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d412a7-71c5-4669-a265-f0f106553454 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.951156] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5243afee-d4a1-4600-e650-650c64859085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.971279] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1063.071210] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865209, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.092629] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.093231] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1063.096455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.319s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.096737] env[69982]: DEBUG nova.objects.instance [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lazy-loading 'resources' on Instance uuid 187cbba3-3700-4c40-a514-28e08ea13426 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.214035] env[69982]: DEBUG nova.network.neutron [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Updated VIF entry in instance network info cache for port 7d9d4676-21b3-4951-a9a4-21f80d05edb0. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1063.214492] env[69982]: DEBUG nova.network.neutron [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Updating instance_info_cache with network_info: [{"id": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "address": "fa:16:3e:49:11:aa", "network": {"id": "3262ab71-d084-41ac-a0cb-7411f879f42b", "bridge": "br-int", "label": "tempest-ImagesTestJSON-544516237-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9cc588ded27b49d4826535649105aa88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e31a7f15-a808-4199-9071-31fd05e316ea", "external-id": "nsx-vlan-transportzone-388", "segmentation_id": 388, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d9d4676-21", "ovs_interfaceid": "7d9d4676-21b3-4951-a9a4-21f80d05edb0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.223493] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865212, 'name': ReconfigVM_Task, 'duration_secs': 0.354664} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.223493] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb/0b973aa8-6d25-4de9-8a6b-7bb9f65671fb.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1063.223493] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09e89147-dba7-4a87-bdd3-5a407fafba6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.231346] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1063.231346] env[69982]: value = "task-3865213" [ 1063.231346] env[69982]: _type = "Task" [ 1063.231346] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.243831] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865213, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.450679] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5243afee-d4a1-4600-e650-650c64859085, 'name': SearchDatastore_Task, 'duration_secs': 0.014686} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.450679] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.450679] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.450679] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.451520] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.451520] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.451520] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-521db8a4-8112-44c4-8f4d-ee69a4be197f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.464194] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.464474] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.465281] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d10e80e-aa29-4887-be38-3e71949c9cc1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.474173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.474361] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.474535] env[69982]: DEBUG nova.network.neutron [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1063.474752] env[69982]: DEBUG nova.objects.instance [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'info_cache' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.481468] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1063.484029] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8caa335a-00ff-498f-88ec-0a27347216d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.486070] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1063.486070] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c38578-2464-a7c1-8d86-a191b08e68dc" [ 1063.486070] env[69982]: _type = "Task" [ 1063.486070] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.496657] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1063.496657] env[69982]: value = "task-3865214" [ 1063.496657] env[69982]: _type = "Task" [ 1063.496657] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.507534] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c38578-2464-a7c1-8d86-a191b08e68dc, 'name': SearchDatastore_Task, 'duration_secs': 0.011102} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.509439] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d0a7592-63eb-44a7-a5f0-dcc8988c4130 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.520498] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865214, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.525050] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1063.525050] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523fd82f-3b0c-72f4-6d28-035ac54cbc91" [ 1063.525050] env[69982]: _type = "Task" [ 1063.525050] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.534779] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523fd82f-3b0c-72f4-6d28-035ac54cbc91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.570186] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865209, 'name': ReconfigVM_Task, 'duration_secs': 1.468343} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.570186] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Reconfigured VM instance instance-00000049 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1063.575229] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c22f6544-7e38-41d5-b419-f2399942552e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.593900] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1063.593900] env[69982]: value = "task-3865215" [ 1063.593900] env[69982]: _type = "Task" [ 1063.593900] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.600376] env[69982]: DEBUG nova.compute.utils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1063.609567] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1063.610827] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1063.612404] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865215, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.691452] env[69982]: DEBUG nova.policy [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1b236e48f6614896893c7769b0f4b85d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5abc674cd29242f3b13fa1555f2a2877', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1063.716917] env[69982]: DEBUG oslo_concurrency.lockutils [req-04d4ec53-e2dc-4c4f-ada8-0e95531eff5e req-29d043cd-d8fc-4adc-a261-21665b33c2d2 service nova] Releasing lock "refresh_cache-68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.745544] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865213, 'name': Rename_Task, 'duration_secs': 0.156986} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.745544] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1063.745799] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-341e45ef-c7e4-4e11-bbb3-484b93ccecec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.754514] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1063.754514] env[69982]: value = "task-3865216" [ 1063.754514] env[69982]: _type = "Task" [ 1063.754514] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.766502] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865216, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.020878] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865214, 'name': PowerOffVM_Task, 'duration_secs': 0.211888} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.025794] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.025794] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1064.041463] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523fd82f-3b0c-72f4-6d28-035ac54cbc91, 'name': SearchDatastore_Task, 'duration_secs': 0.015758} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.041965] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.042223] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3/68c212df-c7a0-45c5-a00c-f94c6a9a9bb3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1064.042458] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a458960-e2cf-4225-82ad-c0b1bf67089d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.051236] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1064.051236] env[69982]: value = "task-3865217" [ 1064.051236] env[69982]: _type = "Task" [ 1064.051236] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.070277] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865217, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.074269] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f501a5-c558-42ad-9b0f-644b0e2c7ce1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.087989] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671ebd79-38d6-4918-9d10-fdd684c8a416 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.127612] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1064.131868] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0ecddf3-6ea2-45dc-b56d-452fc340388f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.146770] env[69982]: DEBUG oslo_vmware.api [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865215, 'name': ReconfigVM_Task, 'duration_secs': 0.186288} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.150099] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4362201-155f-4455-b764-96dc8a2eabf9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.154207] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768029', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'name': 'volume-44d0e079-95f9-4407-b06b-9c0c5646922a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82', 'attached_at': '', 'detached_at': '', 'volume_id': '44d0e079-95f9-4407-b06b-9c0c5646922a', 'serial': '44d0e079-95f9-4407-b06b-9c0c5646922a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1064.171356] env[69982]: DEBUG nova.compute.provider_tree [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.175770] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Successfully created port: 
ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.270331] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865216, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.539393] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1064.539393] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1064.539393] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1064.539393] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1064.544459] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1064.544459] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1064.544459] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1064.544459] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e 
tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1064.544459] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1064.544930] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1064.544930] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1064.555717] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4314a029-f27e-40d7-9fdf-63bc833a06eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.585555] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865217, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.592020] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1064.592020] env[69982]: value = "task-3865218" [ 1064.592020] env[69982]: _type = "Task" [ 1064.592020] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.615744] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865218, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.679268] env[69982]: DEBUG nova.scheduler.client.report [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.724444] env[69982]: DEBUG nova.objects.instance [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'flavor' on Instance uuid 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.751577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.751922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.767812] env[69982]: DEBUG oslo_vmware.api [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865216, 'name': PowerOnVM_Task, 'duration_secs': 0.529124} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.768157] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1064.768390] env[69982]: INFO nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Took 8.60 seconds to spawn the instance on the hypervisor. 
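The repeated "Waiting for the task ... to complete", "progress is N%" and "completed successfully" entries above come from oslo.vmware's task polling (wait_for_task / _poll_task). Below is a minimal sketch of that usage pattern in isolation, assuming placeholder vCenter credentials and an illustrative VM managed-object id (none of these values are taken from this log); it is a rough illustration of the oslo.vmware API, not Nova's own driver code.

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Placeholders only -- not the host, credentials or moref from this log.
session = vmware_api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10,
    task_poll_interval=0.5,   # interval behind the periodic "progress is N%" polls
)

# Build a managed object reference for the VM (id is illustrative).
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# Asynchronous vSphere calls (PowerOnVM_Task, ReconfigVM_Task, ...) return a
# task object; wait_for_task() polls it until completion and raises if the
# task ends in error, which is what produces the log lines above.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)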
[ 1064.768964] env[69982]: DEBUG nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1064.769949] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eeec804-058b-46c6-8fc3-b145bf38c3bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.085676] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865217, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599768} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.085676] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3/68c212df-c7a0-45c5-a00c-f94c6a9a9bb3.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1065.085676] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1065.085676] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dc010427-8d03-4293-9d9f-a5c3fdd9bf2c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.103017] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865218, 'name': ReconfigVM_Task, 'duration_secs': 0.246734} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.108074] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1065.112204] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1065.112204] env[69982]: value = "task-3865219" [ 1065.112204] env[69982]: _type = "Task" [ 1065.112204] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.124904] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865219, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.147759] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1065.182543] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.182791] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.183033] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.183240] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.183383] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.183525] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.183731] env[69982]: DEBUG 
nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.186536] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.186536] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.186536] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.186536] env[69982]: DEBUG nova.virt.hardware [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.186536] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.089s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.187919] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f30d02-e234-4b01-8160-38a49139847e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.191614] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.016s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.191865] env[69982]: DEBUG nova.objects.instance [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'resources' on Instance uuid 331f218a-ad6b-4417-b56d-83113e0c92cb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.200603] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97b4815-869f-4867-87f9-72ea8560394c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.219378] 
env[69982]: DEBUG nova.network.neutron [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.226227] env[69982]: INFO nova.scheduler.client.report [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Deleted allocations for instance 187cbba3-3700-4c40-a514-28e08ea13426 [ 1065.256191] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1065.290938] env[69982]: INFO nova.compute.manager [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Took 37.06 seconds to build instance. 
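The "Updating instance_info_cache with network_info" entry above shows the per-VIF structure Nova caches for an instance. A minimal sketch of reading that structure follows; the dict literal is an abridged copy of the logged entry for instance d21659fd-015d-4f5b-b4b5-f38f550e0f00, and the traversal uses plain dict access rather than Nova's nova.network.model classes.

# Abridged from the cached VIF entry logged above.
network_info = [{
    "id": "cc682899-b104-4e53-b80d-49a30d6e0316",
    "address": "fa:16:3e:93:cf:8e",
    "network": {
        "id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4},
            "ips": [{
                "address": "192.168.128.10",
                "type": "fixed",
                "version": 4,
                "floating_ips": [{"address": "10.180.180.216",
                                  "type": "floating", "version": 4}],
            }],
        }],
    },
    "type": "ovs",
    "devname": "tapcc682899-b1",
    "active": True,
}]

# Walk VIF -> subnet -> fixed IP -> floating IPs.
for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floats = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["devname"], ip["address"], "->", ", ".join(floats) or "-")
# prints: tapcc682899-b1 192.168.128.10 -> 10.180.180.216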
[ 1065.615719] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.616121] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.616235] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.616431] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.616573] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.616717] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.616924] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.617965] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.618297] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.618562] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.620513] env[69982]: DEBUG nova.virt.hardware [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.626464] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1065.627394] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ab17ad2-30ad-40aa-9bd2-f393f2cee24c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.653985] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865219, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12676} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.655632] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1065.656068] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1065.656068] env[69982]: value = "task-3865220" [ 1065.656068] env[69982]: _type = "Task" [ 1065.656068] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.656818] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d9cfa2-cb5d-47b3-aa9c-58cde7ef6662 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.671693] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865220, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.690803] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3/68c212df-c7a0-45c5-a00c-f94c6a9a9bb3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1065.691298] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f896d76c-81fe-4208-aae1-e148a8848f8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.723192] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.723635] env[69982]: DEBUG nova.objects.instance [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'migration_context' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.725454] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1065.725454] env[69982]: value = "task-3865221" [ 1065.725454] env[69982]: _type = "Task" [ 1065.725454] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.748024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7a7c0900-a3b6-4fb5-a844-9c99b84f6f8e tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.416s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.753235] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865221, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.754380] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a5441bd-258c-4929-b3d0-ac14324624b9 tempest-ImagesOneServerNegativeTestJSON-2022974886 tempest-ImagesOneServerNegativeTestJSON-2022974886-project-member] Lock "187cbba3-3700-4c40-a514-28e08ea13426" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.778459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.793620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6a70c96c-afd0-4787-9705-12af48191a6a tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.576s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.994581] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.994840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.995204] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1065.995324] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.995404] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock 
"6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.998247] env[69982]: INFO nova.compute.manager [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Terminating instance [ 1066.101611] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e358e7f-0e01-408b-b639-e7ae890e5bab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.112778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c8e6a2-032a-4813-bbe2-827c6d2c2675 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.173390] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0135ae6c-34dd-4166-aa2a-e08bebcc3bb7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.191763] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a84cfd-c9de-4d00-8c79-8a9d4aed9e3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.199135] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865220, 'name': ReconfigVM_Task, 'duration_secs': 0.275482} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.199879] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1066.201861] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5892fdc0-4021-4026-ba6f-dbe3b3c62ae8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.219477] env[69982]: DEBUG nova.compute.provider_tree [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.237647] env[69982]: DEBUG nova.objects.base [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1066.246129] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.247840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4534eb1f-5355-4b00-b565-48e9e664a3b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.254918] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56329327-9d92-4d75-a241-37af7cfe3a95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.275165] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865221, 'name': ReconfigVM_Task, 'duration_secs': 0.533522} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.289442] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Reconfigured VM instance instance-0000005c to attach disk [datastore2] 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3/68c212df-c7a0-45c5-a00c-f94c6a9a9bb3.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1066.290619] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1066.290619] env[69982]: value = "task-3865222" [ 1066.290619] env[69982]: _type = "Task" [ 1066.290619] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.291505] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b3e460e-4b6e-477d-ad35-28bfb59448f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.293566] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b5edd2f-731f-44d7-8557-c8e7cc66262c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.306047] env[69982]: DEBUG oslo_vmware.api [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1066.306047] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52498ec3-ddac-de71-fff4-193af9bbf63f" [ 1066.306047] env[69982]: _type = "Task" [ 1066.306047] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.312130] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1066.312130] env[69982]: value = "task-3865223" [ 1066.312130] env[69982]: _type = "Task" [ 1066.312130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.312840] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865222, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.319530] env[69982]: DEBUG nova.compute.manager [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Received event network-vif-plugged-ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1066.319669] env[69982]: DEBUG oslo_concurrency.lockutils [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] Acquiring lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.320040] env[69982]: DEBUG oslo_concurrency.lockutils [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.320289] env[69982]: DEBUG oslo_concurrency.lockutils [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.320504] env[69982]: DEBUG nova.compute.manager [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] No waiting events found dispatching network-vif-plugged-ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1066.320729] env[69982]: WARNING nova.compute.manager [req-4ae78877-8969-45e1-81f7-60a273d727d7 req-b9ae3278-2ccf-49bd-abf3-fe924b05fb30 service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Received unexpected event network-vif-plugged-ba417169-23c9-4d5d-86ad-2ab83634ddb2 for instance with vm_state building and task_state spawning. [ 1066.333372] env[69982]: DEBUG oslo_vmware.api [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52498ec3-ddac-de71-fff4-193af9bbf63f, 'name': SearchDatastore_Task, 'duration_secs': 0.010955} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.337211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.337335] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865223, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.367106] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.367520] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.367833] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.368110] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.368364] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.370950] env[69982]: INFO nova.compute.manager [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Terminating instance [ 1066.398292] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Successfully updated port: ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1066.461371] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.461990] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.462132] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.462319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.462492] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.465143] env[69982]: INFO nova.compute.manager [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Terminating instance [ 1066.502914] env[69982]: DEBUG nova.compute.manager [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1066.504229] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.504229] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80912ad5-f871-4d81-8553-6568ed5cbb49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.515405] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.515647] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-004dc40d-8ce9-498a-aa4b-3673b9fd0f8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.523593] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1066.523593] env[69982]: value = "task-3865224" [ 1066.523593] env[69982]: _type = "Task" [ 1066.523593] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.532826] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865224, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.727110] env[69982]: DEBUG nova.scheduler.client.report [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1066.807733] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865222, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.830393] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865223, 'name': Rename_Task, 'duration_secs': 0.271642} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.831946] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.831946] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f47215e9-9cd3-49e2-b1c4-1990382477ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.839104] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1066.839104] env[69982]: value = "task-3865225" [ 1066.839104] env[69982]: _type = "Task" [ 1066.839104] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.849815] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.875972] env[69982]: DEBUG nova.compute.manager [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1066.876438] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.878100] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363ac6df-329e-42a6-a559-365cdccc6226 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.887122] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.887504] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-646465ff-503f-4c4f-9112-1feb3a146351 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.895692] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1066.895692] env[69982]: value = "task-3865226" [ 1066.895692] env[69982]: _type = "Task" [ 1066.895692] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.901101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.901321] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquired lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.901492] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1066.910333] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865226, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.969103] env[69982]: DEBUG nova.compute.manager [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1066.969383] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.970827] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-593f1a80-c04a-4c21-95f7-3fe7174f3eae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.981051] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1066.981338] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0e90dfb-98d6-40b1-bb0a-4e69a833d0a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.990699] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1066.990699] env[69982]: value = "task-3865227" [ 1066.990699] env[69982]: _type = "Task" [ 1066.990699] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.003060] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.036345] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865224, 'name': PowerOffVM_Task, 'duration_secs': 0.324432} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.036791] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.037096] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.037472] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8df651b-f4f2-42d0-adf4-218f66fccfae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.112424] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1067.112708] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1067.112946] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore1] 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.113229] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5988fb5-42bb-4547-8214-b97f3d9a1ad6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.120869] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1067.120869] env[69982]: value = "task-3865229" [ 1067.120869] env[69982]: _type = "Task" [ 1067.120869] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.130460] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.233157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.041s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.236039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.069s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.236039] env[69982]: DEBUG nova.objects.instance [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'resources' on Instance uuid a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.260466] env[69982]: INFO nova.scheduler.client.report [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted allocations for instance 331f218a-ad6b-4417-b56d-83113e0c92cb [ 1067.309590] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865222, 'name': ReconfigVM_Task, 'duration_secs': 0.590917} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.309590] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Reconfigured VM instance instance-00000054 to attach disk [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b/fc46eca6-6a60-477c-98de-a2e1f6c7e88b.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.309590] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.350652] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865225, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.416020] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865226, 'name': PowerOffVM_Task, 'duration_secs': 0.195199} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.416792] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.416792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.417316] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3ed3a87-7249-466e-a4af-766b765806e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.473368] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1067.485508] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1067.485695] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1067.485880] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleting the datastore file [datastore2] 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.486182] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-834303b2-f3f9-4626-82e9-09ef0768f2ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.497543] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1067.497543] env[69982]: value = "task-3865231" [ 1067.497543] env[69982]: _type = "Task" 
[ 1067.497543] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.509307] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865227, 'name': PowerOffVM_Task, 'duration_secs': 0.255152} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.510213] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1067.510497] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1067.510820] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91c4014c-2a1a-4b6a-8e19-ff6c43bbe06f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.515837] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.588361] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1067.588884] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1067.589255] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleting the datastore file [datastore2] 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1067.589583] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cd0fec1d-94dc-4171-91ea-31c249ab0185 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.601032] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1067.601032] env[69982]: value = "task-3865233" [ 1067.601032] env[69982]: _type = "Task" [ 1067.601032] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.610860] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865233, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.635479] env[69982]: DEBUG oslo_vmware.api [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193773} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.636114] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1067.636203] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1067.636417] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1067.636641] env[69982]: INFO nova.compute.manager [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1067.636930] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.637202] env[69982]: DEBUG nova.compute.manager [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1067.637368] env[69982]: DEBUG nova.network.neutron [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1067.746514] env[69982]: DEBUG nova.network.neutron [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Updating instance_info_cache with network_info: [{"id": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "address": "fa:16:3e:df:46:00", "network": {"id": "95d7c315-5bf6-48a5-ad15-398a33fba350", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2139885841-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5abc674cd29242f3b13fa1555f2a2877", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba417169-23", "ovs_interfaceid": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.770651] env[69982]: DEBUG oslo_concurrency.lockutils [None req-606bc0fb-063d-49b6-b4bb-7f37d6b91228 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "331f218a-ad6b-4417-b56d-83113e0c92cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.377s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.819905] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d1dfeb-e641-4b05-ae99-4cba0cb7a3f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.854410] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bded43b5-b7eb-46fa-ba7f-528196bcf11e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.864573] env[69982]: DEBUG oslo_vmware.api [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865225, 'name': PowerOnVM_Task, 'duration_secs': 0.573164} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.882335] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.882633] env[69982]: INFO nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Took 9.67 seconds to spawn the instance on the hypervisor. [ 1067.882841] env[69982]: DEBUG nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.885353] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1067.892076] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fce06b-08b9-4980-9545-a72503cbe9ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.017194] env[69982]: DEBUG oslo_vmware.api [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161394} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.017927] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.018084] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.018306] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.018655] env[69982]: INFO nova.compute.manager [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1068.018779] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.019296] env[69982]: DEBUG nova.compute.manager [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1068.019296] env[69982]: DEBUG nova.network.neutron [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1068.112455] env[69982]: DEBUG oslo_vmware.api [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174166} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.113899] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1068.113899] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1068.113899] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1068.113899] env[69982]: INFO nova.compute.manager [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1068.113899] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.114192] env[69982]: DEBUG nova.compute.manager [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1068.114192] env[69982]: DEBUG nova.network.neutron [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1068.227911] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3083bb6-515a-4d4f-80a4-753ed19823e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.243141] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1068.244147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30457ac-a730-4d46-b0c1-d4dd1896f6bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.248814] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc725bb-df26-4a65-bd74-2ad653aed60f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.252114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Releasing lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.252442] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Instance network_info: |[{"id": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "address": "fa:16:3e:df:46:00", "network": {"id": "95d7c315-5bf6-48a5-ad15-398a33fba350", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2139885841-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5abc674cd29242f3b13fa1555f2a2877", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba417169-23", "ovs_interfaceid": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1068.253091] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:46:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60badc2d-69d2-467d-a92e-98511f5cb0b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba417169-23c9-4d5d-86ad-2ab83634ddb2', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1068.260554] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Creating folder: Project (5abc674cd29242f3b13fa1555f2a2877). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.261610] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f4304e5-6cbc-4c7e-85bb-36d096f2867f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.291512] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1068.291687] env[69982]: ERROR oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk due to incomplete transfer. [ 1068.292885] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13125445-70fe-4055-9861-f113dbf460c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.295524] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-96aac739-fbf8-48bc-b7d7-f86731d691e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.304564] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ea463c-f9d5-46e7-95c7-3dfcc6b20913 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.308733] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Created folder: Project (5abc674cd29242f3b13fa1555f2a2877) in parent group-v767796. 
[ 1068.308920] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Creating folder: Instances. Parent ref: group-v768055. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1068.310512] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6accc29-72c7-4016-9e7f-e4c88b464aa0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.312401] env[69982]: DEBUG oslo_vmware.rw_handles [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52227de7-02e2-2ff3-835f-863b464b4871/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1068.312743] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Uploaded image 07979a20-baf7-482a-918a-853610d09226 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1068.315287] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1068.315909] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-cd7a2cae-a73d-4a2b-97ea-90439ef73073 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.327253] env[69982]: DEBUG nova.compute.provider_tree [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.329916] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Created folder: Instances in parent group-v768055. [ 1068.330067] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1068.331395] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1068.331706] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1068.331706] env[69982]: value = "task-3865236" [ 1068.331706] env[69982]: _type = "Task" [ 1068.331706] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.331905] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5df8b601-5ad1-4a91-bd84-941efbffa7d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.360201] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865236, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.363129] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1068.363129] env[69982]: value = "task-3865237" [ 1068.363129] env[69982]: _type = "Task" [ 1068.363129] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.365531] env[69982]: DEBUG nova.compute.manager [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Received event network-changed-ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.365531] env[69982]: DEBUG nova.compute.manager [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Refreshing instance network info cache due to event network-changed-ba417169-23c9-4d5d-86ad-2ab83634ddb2. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1068.365531] env[69982]: DEBUG oslo_concurrency.lockutils [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] Acquiring lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.365531] env[69982]: DEBUG oslo_concurrency.lockutils [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] Acquired lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.365531] env[69982]: DEBUG nova.network.neutron [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Refreshing network info cache for port ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1068.378119] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865237, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.410369] env[69982]: INFO nova.compute.manager [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Took 39.16 seconds to build instance. [ 1068.526158] env[69982]: DEBUG nova.network.neutron [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Port 35894e50-7421-402a-91f6-e5a640cebd85 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1068.737022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "8518f3c8-738d-468a-9f57-de50e4e67108" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.737022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.737022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.737022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 
tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.737441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.739276] env[69982]: INFO nova.compute.manager [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Terminating instance [ 1068.768175] env[69982]: DEBUG nova.compute.manager [req-ba56e6eb-7167-4996-990a-11f1157aee51 req-e51b3e47-ef35-4be3-a383-d2139c0d254e service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Received event network-vif-deleted-099b128a-c69d-49a3-8dea-548476244e6f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1068.768465] env[69982]: INFO nova.compute.manager [req-ba56e6eb-7167-4996-990a-11f1157aee51 req-e51b3e47-ef35-4be3-a383-d2139c0d254e service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Neutron deleted interface 099b128a-c69d-49a3-8dea-548476244e6f; detaching it from the instance and deleting it from the info cache [ 1068.768557] env[69982]: DEBUG nova.network.neutron [req-ba56e6eb-7167-4996-990a-11f1157aee51 req-e51b3e47-ef35-4be3-a383-d2139c0d254e service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.833209] env[69982]: DEBUG nova.scheduler.client.report [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.862790] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865236, 'name': Destroy_Task, 'duration_secs': 0.378493} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.864436] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Destroyed the VM [ 1068.866877] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1068.872050] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ea405f77-b499-474e-9f60-ce9d13cd41c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.889051] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865237, 'name': CreateVM_Task, 'duration_secs': 0.379175} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.892617] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.893036] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1068.893036] env[69982]: value = "task-3865239" [ 1068.893036] env[69982]: _type = "Task" [ 1068.893036] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.893794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.893985] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.894913] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.894913] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c125787-17f9-4c32-8ce9-8c41fa100fa3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.904985] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1068.904985] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52912b00-4274-b320-0316-cf52028ffa63" [ 1068.904985] env[69982]: _type = "Task" [ 1068.904985] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.908098] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865239, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.912227] env[69982]: DEBUG oslo_concurrency.lockutils [None req-52c5d2fe-de30-4479-ab0b-433a12063a13 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.667s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.917724] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52912b00-4274-b320-0316-cf52028ffa63, 'name': SearchDatastore_Task, 'duration_secs': 0.010151} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.917979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.918265] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1068.918511] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.918657] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.918841] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.919135] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d419a3ee-cd6d-4bf4-80f1-c1e09ede9345 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.970132] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.970132] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1068.970132] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49a860b9-c73a-4858-8587-c7bcb3674804 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.970132] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1068.970132] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e4de25-b99a-0020-d5b4-2b2d49c6afe0" [ 1068.970132] env[69982]: _type = "Task" [ 1068.970132] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.970132] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e4de25-b99a-0020-d5b4-2b2d49c6afe0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.038839] env[69982]: DEBUG nova.network.neutron [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.075123] env[69982]: DEBUG nova.network.neutron [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.152418] env[69982]: DEBUG nova.network.neutron [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.222176] env[69982]: DEBUG nova.network.neutron [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Updated VIF entry in instance network info cache for port ba417169-23c9-4d5d-86ad-2ab83634ddb2. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1069.222240] env[69982]: DEBUG nova.network.neutron [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Updating instance_info_cache with network_info: [{"id": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "address": "fa:16:3e:df:46:00", "network": {"id": "95d7c315-5bf6-48a5-ad15-398a33fba350", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2139885841-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5abc674cd29242f3b13fa1555f2a2877", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba417169-23", "ovs_interfaceid": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.245685] env[69982]: DEBUG nova.compute.manager [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1069.245685] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1069.246798] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ac12f9-0a95-471f-a0a2-56b344e36c98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.257014] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.258043] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a3000a1-3f2a-45ec-a668-7185910bb573 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.266443] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1069.266443] env[69982]: value = "task-3865240" [ 1069.266443] env[69982]: _type = "Task" [ 1069.266443] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.272388] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95042063-0ef2-4a14-95a4-919b75bdf8bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.279026] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865240, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.287057] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e612811d-a8e9-4668-9c2d-ea2a131229a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.321698] env[69982]: DEBUG nova.compute.manager [req-ba56e6eb-7167-4996-990a-11f1157aee51 req-e51b3e47-ef35-4be3-a383-d2139c0d254e service nova] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Detach interface failed, port_id=099b128a-c69d-49a3-8dea-548476244e6f, reason: Instance 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1069.345548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.349840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.682s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.351576] env[69982]: INFO nova.compute.claims [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1069.376025] env[69982]: INFO nova.scheduler.client.report [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae [ 1069.408509] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865239, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.460471] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e4de25-b99a-0020-d5b4-2b2d49c6afe0, 'name': SearchDatastore_Task, 'duration_secs': 0.011992} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.461552] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8e55b2-c1f2-439e-89e3-3b2cf8a92cf0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.467825] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1069.467825] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52311b2a-59b2-43c5-afa5-a589b7871312" [ 1069.467825] env[69982]: _type = "Task" [ 1069.467825] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.477241] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52311b2a-59b2-43c5-afa5-a589b7871312, 'name': SearchDatastore_Task} progress is 0%. 
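
The "compute_resources" lock records in the lines above ("acquired ... waited", "released ... held") are emitted by oslo.concurrency's lockutils (the `inner` wrapper cited in the records), which Nova's ResourceTracker uses to serialize instance claims and usage updates. A minimal sketch of that primitive, with example function names that are not Nova's own code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example():
        # Runs with the named in-process lock held; lockutils itself logs the
        # "acquired ... waited Ns" / "released ... held Ns" lines seen above.
        return 'updated'

    def instance_claim_example():
        # Equivalent context-manager form of the same lock.
        with lockutils.lock('compute_resources'):
            return 'claimed'

    print(update_usage_example(), instance_claim_example())
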
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.543711] env[69982]: INFO nova.compute.manager [-] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Took 1.91 seconds to deallocate network for instance. [ 1069.553326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.555067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.555067] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.577542] env[69982]: INFO nova.compute.manager [-] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Took 1.56 seconds to deallocate network for instance. [ 1069.656872] env[69982]: INFO nova.compute.manager [-] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Took 1.54 seconds to deallocate network for instance. [ 1069.728567] env[69982]: DEBUG oslo_concurrency.lockutils [req-760a7c82-e099-422a-bedb-bb83b294859d req-bfdaa805-cee7-4fa3-a8ae-34f3aa8f86ba service nova] Releasing lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.777905] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865240, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.884885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-beb69d1e-de58-4767-b4a7-304c14259f27 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.732s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.907922] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865239, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.979931] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52311b2a-59b2-43c5-afa5-a589b7871312, 'name': SearchDatastore_Task, 'duration_secs': 0.010808} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.980265] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.980549] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] dffd4365-d2b6-4201-be46-a823399bb2a6/dffd4365-d2b6-4201-be46-a823399bb2a6.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.980839] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a4de238-ff9e-457c-92c2-8f52d561f6f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.992381] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1069.992381] env[69982]: value = "task-3865241" [ 1069.992381] env[69982]: _type = "Task" [ 1069.992381] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.999705] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865241, 'name': CopyVirtualDisk_Task} progress is 0%. 
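
The repeated "Invoking <something>_Task", "Waiting for the task ... to complete" and "progress is N%" records above are oslo.vmware's task polling: a vCenter call returns a task reference and the session polls it until it reaches a terminal state. A minimal, self-contained sketch of that polling pattern, with a caller-supplied poll function standing in for the real TaskInfo lookup (an assumption, not oslo.vmware's actual API):

    import time

    def wait_for_task(poll_fn, poll_interval=0.5):
        """poll_fn() -> (state, progress, error); state mirrors the vSphere
        TaskInfo states 'queued', 'running', 'success', 'error'."""
        while True:
            state, progress, error = poll_fn()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError(f'task failed: {error}')
            # Still queued/running: report progress and poll again, which is
            # what produces the "progress is N%" records in the log.
            print(f'progress is {progress}%')
            time.sleep(poll_interval)

    # Example: a fake task that completes on the third poll.
    _polls = iter([('running', 0, None), ('running', 0, None), ('success', 100, None)])
    wait_for_task(lambda: next(_polls), poll_interval=0)
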
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.062117] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.084979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.163899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.283879] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865240, 'name': PowerOffVM_Task, 'duration_secs': 0.642151} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.284602] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.285016] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.285556] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4c3732c-c8d3-45bf-8f93-af414e9b3bcc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.353544] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.354020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.354614] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleting the datastore file [datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.355081] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2b0d075-50f7-4006-8c1c-ad4e88f14c13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.364700] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1070.364700] env[69982]: value = "task-3865243" [ 1070.364700] env[69982]: _type = "Task" [ 1070.364700] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.376131] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.408504] env[69982]: DEBUG nova.compute.manager [req-748f0415-9712-41f1-ba88-d5b997f81f49 req-456e87b9-14c4-43ed-b813-b3956fd1c612 service nova] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Received event network-vif-deleted-18a2c6c8-1313-42eb-a058-40e272e7fda3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.408897] env[69982]: DEBUG nova.compute.manager [req-748f0415-9712-41f1-ba88-d5b997f81f49 req-456e87b9-14c4-43ed-b813-b3956fd1c612 service nova] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Received event network-vif-deleted-9206fa5a-6a33-4a8f-aeee-827c3f1d0b46 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.413805] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865239, 'name': RemoveSnapshot_Task, 'duration_secs': 1.166062} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.415131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1070.415230] env[69982]: DEBUG nova.compute.manager [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.420040] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5362fc2-a3c0-4a01-b819-82dfd2afac5e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.503044] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865241, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493288} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.504380] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] dffd4365-d2b6-4201-be46-a823399bb2a6/dffd4365-d2b6-4201-be46-a823399bb2a6.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.504380] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.504380] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f28b67a8-78a8-400d-af4d-58f5bbbcdbc9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.511694] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1070.511694] env[69982]: value = "task-3865244" [ 1070.511694] env[69982]: _type = "Task" [ 1070.511694] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.523243] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865244, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.599044] env[69982]: DEBUG nova.compute.manager [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.599354] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce23434e-020a-4f87-803b-7cc8cbe8970f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.614362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.614362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.614362] env[69982]: DEBUG nova.network.neutron [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1070.768560] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23354a64-05da-4747-a4e7-e65590f3eeab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.777753] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab7d43e-8755-4a2e-8f6c-4c2eea79a9aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.811061] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed0ffa1-4472-4e09-9642-26134f14efcd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.820344] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91d77bd-3069-4286-baf8-b632894c3cf0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.836799] env[69982]: DEBUG nova.compute.provider_tree [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.876176] env[69982]: DEBUG oslo_vmware.api [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865243, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.244236} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.876450] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1070.876652] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1070.876810] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1070.877119] env[69982]: INFO nova.compute.manager [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1070.877778] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1070.877778] env[69982]: DEBUG nova.compute.manager [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1070.877778] env[69982]: DEBUG nova.network.neutron [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1070.935817] env[69982]: INFO nova.compute.manager [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Shelve offloading [ 1071.023141] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073812} completed successfully. 
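
The records above trace the full teardown of instance 8518f3c8-738d-468a-9f57-de50e4e67108: power off the VM, unregister it, delete its datastore directory, and only then deallocate the Neutron ports. A sketch of that ordering with hypothetical print-only stand-ins (not Nova's or oslo.vmware's functions):

    # Hypothetical stand-ins for the vSphere/Neutron calls seen in the log.
    def power_off(vm_ref):      print(f'PowerOffVM_Task({vm_ref})')
    def unregister(vm_ref):     print(f'UnregisterVM({vm_ref})')
    def delete_files(ds_path):  print(f'DeleteDatastoreFile_Task({ds_path})')
    def delete_port(port_id):   print(f'delete_port({port_id})')

    def destroy_instance(vm_ref, ds_path, ports):
        # Order matters: the disk directory is only removed once the VM is
        # powered off and unregistered, and network deallocation follows the
        # hypervisor-side destroy, matching the sequence in the records above.
        power_off(vm_ref)
        unregister(vm_ref)
        delete_files(ds_path)
        for port in ports:
            delete_port(port)

    destroy_instance('vm-example',
                     '[datastore2] 8518f3c8-738d-468a-9f57-de50e4e67108',
                     [])
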
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.025217] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1071.025217] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019ae17d-357a-4816-ae50-bd4ae5d18177 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.052461] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] dffd4365-d2b6-4201-be46-a823399bb2a6/dffd4365-d2b6-4201-be46-a823399bb2a6.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.052867] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92f32756-5468-405d-8f72-d9d4acc19b83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.077199] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1071.077199] env[69982]: value = "task-3865245" [ 1071.077199] env[69982]: _type = "Task" [ 1071.077199] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.088197] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865245, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.115665] env[69982]: INFO nova.compute.manager [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] instance snapshotting [ 1071.122746] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d53d4e9-bae2-43fa-8c60-508ea3d1e7dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.147022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b7f656-0772-4ca9-9e53-c0c1a4371a64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.215963] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.216372] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.341991] env[69982]: DEBUG nova.scheduler.client.report [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1071.439546] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.440120] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ab99b3e3-51aa-4a12-b476-9f707dbf2fae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.450431] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1071.450431] env[69982]: value = "task-3865246" [ 1071.450431] env[69982]: _type = "Task" [ 1071.450431] 
env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.460726] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1071.460910] env[69982]: DEBUG nova.compute.manager [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1071.462098] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74210d7d-e7d3-4aae-bb93-16dc38a49ccd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.468859] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.469054] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.469237] env[69982]: DEBUG nova.network.neutron [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1071.584482] env[69982]: DEBUG nova.network.neutron [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [{"id": "35894e50-7421-402a-91f6-e5a640cebd85", "address": "fa:16:3e:fe:6f:b0", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35894e50-74", "ovs_interfaceid": 
"35894e50-7421-402a-91f6-e5a640cebd85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.590875] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865245, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.660684] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1071.661094] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7820053e-02c8-4a43-9038-899cbe434858 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.670754] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1071.670754] env[69982]: value = "task-3865247" [ 1071.670754] env[69982]: _type = "Task" [ 1071.670754] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.683277] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865247, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.719701] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1071.851378] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.500s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.851378] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1071.856916] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.158s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.857192] env[69982]: DEBUG nova.objects.instance [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lazy-loading 'resources' on Instance uuid 9123b08c-d2ec-4c4d-bade-0acdae75640a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1071.989230] env[69982]: DEBUG nova.compute.manager [req-d18c1818-163d-4f6a-b844-1ad6f805e6ff req-f3366254-a2b2-447a-8193-dee4eb7d0054 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Received event network-vif-deleted-73909075-891e-4fc4-a912-c3757fcda156 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1071.989584] env[69982]: INFO nova.compute.manager [req-d18c1818-163d-4f6a-b844-1ad6f805e6ff req-f3366254-a2b2-447a-8193-dee4eb7d0054 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Neutron deleted interface 73909075-891e-4fc4-a912-c3757fcda156; detaching it from the instance and deleting it from the info cache [ 1071.990028] env[69982]: DEBUG nova.network.neutron [req-d18c1818-163d-4f6a-b844-1ad6f805e6ff req-f3366254-a2b2-447a-8193-dee4eb7d0054 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.091570] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865245, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.091570] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.182017] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865247, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.220118] env[69982]: DEBUG nova.network.neutron [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.246432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.361452] env[69982]: DEBUG nova.compute.utils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1072.368448] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1072.368448] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1072.387282] env[69982]: DEBUG nova.network.neutron [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.435540] env[69982]: DEBUG nova.policy [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1072.493235] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63c66257-bea2-4104-b816-c3ec149d6422 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.511325] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a1cb06-5469-4cd2-ba1b-ff581852858b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.554927] env[69982]: DEBUG nova.compute.manager [req-d18c1818-163d-4f6a-b844-1ad6f805e6ff req-f3366254-a2b2-447a-8193-dee4eb7d0054 service nova] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Detach interface failed, port_id=73909075-891e-4fc4-a912-c3757fcda156, reason: Instance 8518f3c8-738d-468a-9f57-de50e4e67108 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1072.590732] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865245, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.619704] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9285161f-c23f-4474-bc5e-d2685198d535 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.643061] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fab5b08-60e4-4855-816b-109051158eda {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.651507] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.681543] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865247, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.723123] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.772849] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Successfully created port: c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1072.817658] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d1d795-59f3-46b3-a0b6-5ebdb265f9bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.827497] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca9904c-3703-43fa-b0b6-93634aba7ade {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.867536] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1072.873444] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690449ef-7742-46cb-91f7-ec7c6b49fe08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.883017] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cacda10-4174-4b3c-bacc-bdfa766d64e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.901526] env[69982]: INFO nova.compute.manager [-] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Took 2.02 seconds to deallocate network for instance. [ 1072.902101] env[69982]: DEBUG nova.compute.provider_tree [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1073.090539] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865245, 'name': ReconfigVM_Task, 'duration_secs': 1.524307} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.090844] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Reconfigured VM instance instance-0000005d to attach disk [datastore1] dffd4365-d2b6-4201-be46-a823399bb2a6/dffd4365-d2b6-4201-be46-a823399bb2a6.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1073.092043] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f63b57fd-9311-4958-aeb9-a4c4a159490f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.101226] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1073.101226] env[69982]: value = "task-3865248" [ 1073.101226] env[69982]: _type = "Task" [ 1073.101226] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.110906] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865248, 'name': Rename_Task} progress is 0%. 
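
The "Inventory has not changed" records refer back to the provider inventory logged earlier for 206a5498-2e79-46c1-a636-9488a05fb67d. Placement computes each resource class's schedulable capacity as (total - reserved) * allocation_ratio; a quick check of what that yields for the logged values:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
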
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.162228] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.162712] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45ffe98a-f361-4019-9b4a-f1f9675110c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.168262] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.169287] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83037a9-3cd9-431e-9c27-d9c062408752 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.174744] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1073.174744] env[69982]: value = "task-3865249" [ 1073.174744] env[69982]: _type = "Task" [ 1073.174744] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.184814] env[69982]: DEBUG nova.compute.manager [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-vif-unplugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1073.185121] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.185389] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.185615] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.185820] env[69982]: DEBUG nova.compute.manager [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] 
[instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] No waiting events found dispatching network-vif-unplugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1073.186023] env[69982]: WARNING nova.compute.manager [req-2c140e48-877c-4afe-9d3a-1e940fee5548 req-d80bdca5-3541-4dbb-9f18-5061c0fd2dfd service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received unexpected event network-vif-unplugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 for instance with vm_state shelved and task_state shelving_offloading. [ 1073.195575] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.195911] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865247, 'name': CreateSnapshot_Task, 'duration_secs': 1.415653} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.196165] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96eedbe5-1e3f-4d0c-a54c-cbbba0e87366 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.198133] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1073.203738] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbccb69-84f2-4fec-b95e-be9fda211c5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.207568] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865249, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.295029] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.295222] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.295464] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleting the datastore file [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.295791] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a5d9af0-8b44-4212-9b36-d596079aa7c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.303647] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1073.303647] env[69982]: value = "task-3865251" [ 1073.303647] env[69982]: _type = "Task" [ 1073.303647] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.314254] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865251, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.407485] env[69982]: DEBUG nova.scheduler.client.report [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.414857] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.460930] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.461258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.461504] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.461733] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.462019] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.464316] env[69982]: INFO nova.compute.manager [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Terminating instance [ 1073.611926] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865248, 'name': Rename_Task, 'duration_secs': 0.189671} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.612305] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1073.612557] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e013d138-6007-4c17-b14c-4863d5117e5f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.619380] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1073.619380] env[69982]: value = "task-3865252" [ 1073.619380] env[69982]: _type = "Task" [ 1073.619380] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.628019] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865252, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.692348] env[69982]: DEBUG oslo_vmware.api [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865249, 'name': PowerOnVM_Task, 'duration_secs': 0.420079} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.692705] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.692959] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fc43b323-e2fb-4b97-b412-f87fbcd8160e tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance 'fc46eca6-6a60-477c-98de-a2e1f6c7e88b' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1073.726948] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1073.727702] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6b0d6c04-0033-4f02-a962-e0c31782e991 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.738341] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1073.738341] env[69982]: value = "task-3865253" [ 1073.738341] env[69982]: _type = "Task" [ 1073.738341] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.751813] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865253, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.815037] env[69982]: DEBUG oslo_vmware.api [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175126} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.815280] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.815502] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.815689] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.882422] env[69982]: INFO nova.scheduler.client.report [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted allocations for instance ad43c35a-69bc-4c84-8869-cfde6f516b9b [ 1073.887049] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1073.914045] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.055s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.920413] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.136s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.920910] env[69982]: DEBUG nova.objects.instance [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lazy-loading 'resources' on Instance uuid 00f8efe0-28ad-4d95-b931-a31de0c03bd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.935720] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1073.936066] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.936414] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1073.936524] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.937779] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1073.937779] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1073.937779] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1073.937779] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1073.937779] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1073.938097] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1073.938097] env[69982]: DEBUG nova.virt.hardware [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1073.938619] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420848eb-f1b7-4407-87d5-1ff7a0aa3316 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.945392] env[69982]: INFO nova.scheduler.client.report [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Deleted allocations for instance 9123b08c-d2ec-4c4d-bade-0acdae75640a [ 1073.952651] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08455731-f6af-4aa2-a43a-0c15e1f99a96 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.971353] env[69982]: DEBUG nova.compute.manager [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1073.971687] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.974949] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f115871-6faa-42dc-b241-041bc19b0cf5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.982752] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.983070] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-621cd6a0-009f-4549-8596-099df15796d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.991509] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1073.991509] env[69982]: value = "task-3865254" [ 1073.991509] env[69982]: _type = "Task" [ 1073.991509] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.005569] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.136310] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865252, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.252703] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865253, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.391777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.466252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b86f56c0-23a4-4cd8-b6c4-54ad64d5f50d tempest-ServersAdminTestJSON-63150947 tempest-ServersAdminTestJSON-63150947-project-member] Lock "9123b08c-d2ec-4c4d-bade-0acdae75640a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.411s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.505866] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865254, 'name': PowerOffVM_Task, 'duration_secs': 0.355872} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.510415] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1074.510776] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1074.511423] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57fc2bae-575a-472e-891e-bd7811ee42f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.523035] env[69982]: DEBUG nova.compute.manager [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Received event network-vif-plugged-c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1074.523575] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] Acquiring lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.523990] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.524527] env[69982]: DEBUG oslo_concurrency.lockutils [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.525812] env[69982]: DEBUG nova.compute.manager [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] No waiting events found dispatching network-vif-plugged-c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1074.525812] env[69982]: WARNING nova.compute.manager [req-ae7a1ed9-ec92-4908-9c55-48bc7fff1911 req-b7bfb991-5f68-441f-8c8e-8c3cda4ef64c service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Received unexpected event network-vif-plugged-c6f21ecb-5489-44f1-8ffa-8c7b630004a3 for instance with vm_state building and task_state spawning. 
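The recurring "Acquiring lock ... / Lock ... acquired ... waited 0.000s / released ... held 0.000s" triples in the records above (for example around pop_instance_event and the resource tracker's update_usage) are emitted by oslo.concurrency's lockutils helpers. A minimal sketch of that usage pattern follows; the lock names and worker functions are hypothetical stand-ins, while lockutils.synchronized() and lockutils.lock() are the real entry points.

    # Sketch of the oslo.concurrency locking pattern behind the
    # "Acquiring lock / acquired / released" DEBUG lines. The lock names and
    # the functions below are illustrative only.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('demo-instance-events')
    def pop_event(events, key):
        # Runs with the named lock held; concurrent callers block, which is
        # what produces the "waited N.NNNs" / "held N.NNNs" timings.
        return events.pop(key, None)

    def update_usage(tracker):
        # Equivalent explicit critical section using the context-manager form.
        with lockutils.lock('compute_resources'):
            tracker['instances'] = tracker.get('instances', 0) + 1

Both forms log the acquire/release pairs at DEBUG level, which is what fills this log with the lockutils.py "inner" and "lock" entries.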
[ 1074.592481] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1074.594229] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1074.594430] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleting the datastore file [datastore1] d43e5e7a-577d-4fe9-aff7-9012adfbdb9a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1074.595148] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9f99419-9b5e-431f-ac73-ef4c6d121f64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.606074] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1074.606074] env[69982]: value = "task-3865256" [ 1074.606074] env[69982]: _type = "Task" [ 1074.606074] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.617320] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865256, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.634259] env[69982]: DEBUG oslo_vmware.api [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865252, 'name': PowerOnVM_Task, 'duration_secs': 0.657416} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.634556] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1074.634761] env[69982]: INFO nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Took 9.49 seconds to spawn the instance on the hypervisor. 
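Each "Waiting for the task: (returnval){ value = "task-..." } to complete" entry, the "progress is N%" updates and the final "completed successfully ... duration_secs" record above trace oslo.vmware's task polling: a vSphere *_Task method is invoked through the API session and wait_for_task() polls it until it reaches a terminal state. Below is a minimal sketch under the assumption of a reachable vCenter; the host, credentials and vm_ref are placeholders, while VMwareAPISession, invoke_api() and wait_for_task() are the actual oslo.vmware APIs.

    # Sketch of the oslo.vmware task-polling pattern behind the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # lines. Host, credentials and vm_ref are placeholders.
    from oslo_vmware import api as vmware_api

    def power_on(vm_ref):
        session = vmware_api.VMwareAPISession(
            'vcenter.example.org', 'user', 'secret',
            api_retry_count=10, task_poll_interval=0.5)
        # Returns a Task managed-object reference (e.g. value='task-3865249').
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Polls the task every task_poll_interval seconds (the _poll_task
        # DEBUG lines) and raises if vSphere reports an error state.
        return session.wait_for_task(task)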
[ 1074.634946] env[69982]: DEBUG nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1074.635813] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c834ab7f-305f-4013-904a-57f5970857ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.745360] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Successfully updated port: c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1074.759636] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865253, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.887765] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f588fc29-df83-43f6-b014-1c0f075a1599 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.897243] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dad1fe1-968f-45e4-9b48-9411b321f3b0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.937030] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0e688f-924f-4c67-9c69-d1660c593d22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.945317] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a417f28-7bd8-4b65-9835-b8ecc5f33831 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.965703] env[69982]: DEBUG nova.compute.provider_tree [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.118032] env[69982]: DEBUG oslo_vmware.api [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865256, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224776} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.118032] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1075.118032] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1075.118032] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1075.118032] env[69982]: INFO nova.compute.manager [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1075.118325] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1075.118325] env[69982]: DEBUG nova.compute.manager [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1075.118325] env[69982]: DEBUG nova.network.neutron [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1075.178286] env[69982]: INFO nova.compute.manager [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Took 40.44 seconds to build instance. 
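The "Waiting for function ..._deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call machinery, which re-runs a function on an interval until it signals completion. The sketch below shows that mechanism with FixedIntervalLoopingCall; Nova's own retry helper may use a different looping-call variant, and the body of _try_once() is a hypothetical stand-in.

    # Sketch of the oslo.service looping-call pattern referenced by the
    # "Waiting for function ... to return" line. _try_once() is illustrative;
    # FixedIntervalLoopingCall and LoopingCallDone are the real APIs.
    from oslo_service import loopingcall

    def deallocate_with_retries(max_attempts=3):
        attempts = {'n': 0}

        def _try_once():
            attempts['n'] += 1
            try:
                pass  # call the network API here; failures trigger a retry
            except Exception:
                if attempts['n'] >= max_attempts:
                    raise
                return  # run again after the interval
            # Stop the loop and hand a return value back to .wait().
            raise loopingcall.LoopingCallDone(retvalue=True)

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        return timer.start(interval=1.0).wait()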
[ 1075.255211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.255211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.255211] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1075.255211] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865253, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.324514] env[69982]: DEBUG nova.compute.manager [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1075.324514] env[69982]: DEBUG nova.compute.manager [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing instance network info cache due to event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1075.324514] env[69982]: DEBUG oslo_concurrency.lockutils [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.324514] env[69982]: DEBUG oslo_concurrency.lockutils [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.324514] env[69982]: DEBUG nova.network.neutron [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1075.469638] env[69982]: DEBUG nova.scheduler.client.report [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.680239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-563f2993-be7d-47b8-a5a5-ca412e0ffb31 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.957s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.751439] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865253, 'name': CloneVM_Task, 'duration_secs': 1.658965} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.751621] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Created linked-clone VM from snapshot [ 1075.752469] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-445fb5fb-6104-4fde-b47a-c3f5e9e1838f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.763123] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Uploading image 68a9ed9b-2fb7-4d99-8416-99699495f3b8 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1075.776872] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1075.777356] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-014ab797-7830-4f16-94d7-3aaeec682136 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.786531] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1075.786531] env[69982]: value = "task-3865257" [ 1075.786531] env[69982]: _type = "Task" [ 1075.786531] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.796796] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865257, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.816611] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1075.950873] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "dffd4365-d2b6-4201-be46-a823399bb2a6" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.951337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.951627] env[69982]: INFO nova.compute.manager [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Rebooting instance [ 1075.977273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.980774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.202s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.982114] env[69982]: INFO nova.compute.claims [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.003934] env[69982]: INFO nova.scheduler.client.report [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Deleted allocations for instance 00f8efe0-28ad-4d95-b931-a31de0c03bd7 [ 1076.005177] env[69982]: DEBUG nova.network.neutron [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.013825] env[69982]: DEBUG nova.network.neutron [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Updating instance_info_cache with network_info: [{"id": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "address": "fa:16:3e:18:ae:2b", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6f21ecb-54", "ovs_interfaceid": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.227646] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.297531] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865257, 'name': Destroy_Task, 'duration_secs': 0.479144} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.297806] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Destroyed the VM [ 1076.298098] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1076.298391] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-83a5a551-745d-46d5-9d6f-0bea63be601e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.307201] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1076.307201] env[69982]: value = "task-3865258" [ 1076.307201] env[69982]: _type = "Task" [ 1076.307201] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.322426] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865258, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.425746] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.426030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.426233] env[69982]: DEBUG nova.compute.manager [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Going to confirm migration 5 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1076.479489] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.479695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquired lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.479855] env[69982]: DEBUG nova.network.neutron [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1076.504355] env[69982]: DEBUG nova.network.neutron [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updated VIF entry in instance network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1076.504747] env[69982]: DEBUG nova.network.neutron [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.510215] env[69982]: INFO nova.compute.manager [-] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Took 1.39 seconds to deallocate network for instance. [ 1076.517439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.517745] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Instance network_info: |[{"id": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "address": "fa:16:3e:18:ae:2b", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6f21ecb-54", "ovs_interfaceid": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1076.519582] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:ae:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6f21ecb-5489-44f1-8ffa-8c7b630004a3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.528980] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.530353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9380a9e9-cf04-4b00-97a2-4e63a94e73b1 tempest-ServerTagsTestJSON-705485851 tempest-ServerTagsTestJSON-705485851-project-member] Lock "00f8efe0-28ad-4d95-b931-a31de0c03bd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.244s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.531962] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1076.531962] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd8546b0-4d05-47a5-88a0-e915344d64c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.558781] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.558781] env[69982]: value = "task-3865259" [ 1076.558781] env[69982]: _type = "Task" [ 1076.558781] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.573604] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865259, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.611381] env[69982]: DEBUG nova.compute.manager [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Received event network-changed-c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1076.611598] env[69982]: DEBUG nova.compute.manager [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Refreshing instance network info cache due to event network-changed-c6f21ecb-5489-44f1-8ffa-8c7b630004a3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1076.611942] env[69982]: DEBUG oslo_concurrency.lockutils [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] Acquiring lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.612399] env[69982]: DEBUG oslo_concurrency.lockutils [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] Acquired lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.613188] env[69982]: DEBUG nova.network.neutron [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Refreshing network info cache for port c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1076.821041] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865258, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.992065] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.992429] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.992657] env[69982]: DEBUG nova.network.neutron [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1076.992932] env[69982]: DEBUG nova.objects.instance [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'info_cache' on Instance uuid fc46eca6-6a60-477c-98de-a2e1f6c7e88b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.007565] env[69982]: DEBUG oslo_concurrency.lockutils [req-e85ce59e-2ef9-4476-a42a-00af5cbef56d req-23c8d814-3b91-486f-b647-6b9f99fcfa87 service nova] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.021052] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.069618] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865259, 'name': CreateVM_Task, 'duration_secs': 0.37229} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.072027] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1077.078708] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.078920] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.079287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1077.084427] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac90bc3e-541f-41a6-9213-924c7faeef9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.091508] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1077.091508] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0851b-4b9d-9515-5cc2-bd926e989939" [ 1077.091508] env[69982]: _type = "Task" [ 1077.091508] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.104978] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0851b-4b9d-9515-5cc2-bd926e989939, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.254964] env[69982]: DEBUG nova.network.neutron [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Updating instance_info_cache with network_info: [{"id": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "address": "fa:16:3e:df:46:00", "network": {"id": "95d7c315-5bf6-48a5-ad15-398a33fba350", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2139885841-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5abc674cd29242f3b13fa1555f2a2877", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba417169-23", "ovs_interfaceid": "ba417169-23c9-4d5d-86ad-2ab83634ddb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.322664] env[69982]: DEBUG oslo_vmware.api [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865258, 'name': RemoveSnapshot_Task, 'duration_secs': 0.686639} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.325359] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1077.362041] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c47bf4-aae3-48b1-afad-49a1df937460 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.368777] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd70137-a175-48b8-9630-8e6eee10e2e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.412572] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034fc544-c82f-4693-92cc-1431c87b0c05 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.423374] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131c4b3e-4109-4414-a622-d687c263aa12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.439390] env[69982]: DEBUG nova.compute.provider_tree [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1077.604076] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0851b-4b9d-9515-5cc2-bd926e989939, 'name': SearchDatastore_Task, 'duration_secs': 0.02157} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.604076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.604076] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1077.604246] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.604437] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.607569] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1077.607569] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c9336fe-1763-4ee7-a846-c664ca36010e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.615858] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1077.615858] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1077.616469] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-901eccb8-3062-4577-adc3-d609fa2e27ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.622622] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1077.622622] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527db46e-6411-1c02-2c00-29a4e9c7a7b1" [ 1077.622622] env[69982]: _type = "Task" [ 1077.622622] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.632127] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527db46e-6411-1c02-2c00-29a4e9c7a7b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.699612] env[69982]: DEBUG nova.network.neutron [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Updated VIF entry in instance network info cache for port c6f21ecb-5489-44f1-8ffa-8c7b630004a3. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1077.700114] env[69982]: DEBUG nova.network.neutron [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Updating instance_info_cache with network_info: [{"id": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "address": "fa:16:3e:18:ae:2b", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6f21ecb-54", "ovs_interfaceid": "c6f21ecb-5489-44f1-8ffa-8c7b630004a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.763688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Releasing lock "refresh_cache-dffd4365-d2b6-4201-be46-a823399bb2a6" {{(pid=69982) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.835773] env[69982]: WARNING nova.compute.manager [None req-d069a35a-730a-4db9-9f34-ba4d9d0631d6 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Image not found during snapshot: nova.exception.ImageNotFound: Image 68a9ed9b-2fb7-4d99-8416-99699495f3b8 could not be found. [ 1077.943897] env[69982]: DEBUG nova.scheduler.client.report [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.139264] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527db46e-6411-1c02-2c00-29a4e9c7a7b1, 'name': SearchDatastore_Task, 'duration_secs': 0.021555} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.145390] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e366dc18-f9b7-4b6c-96f3-b85f0b9cfe30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.154565] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1078.154565] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4c22c-9ba6-4583-e1b8-5266ad63d95b" [ 1078.154565] env[69982]: _type = "Task" [ 1078.154565] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.166662] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4c22c-9ba6-4583-e1b8-5266ad63d95b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.203897] env[69982]: DEBUG oslo_concurrency.lockutils [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] Releasing lock "refresh_cache-834d66a7-4626-4d85-8e6c-db4a8ec39be0" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.204295] env[69982]: DEBUG nova.compute.manager [req-a493f8aa-2ee2-4346-aefd-401c649c2ac4 req-ff42f1f0-3695-4e3e-9826-5329c5b42504 service nova] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Received event network-vif-deleted-2c91625d-56d6-4706-930e-a55980f4feb1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.248117] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.248395] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.248609] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.248790] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.248956] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.251617] env[69982]: INFO nova.compute.manager [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Terminating instance [ 1078.267782] env[69982]: DEBUG nova.compute.manager [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Checking state {{(pid=69982) 
_get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1078.268673] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035b89c9-f56e-4f25-a5f1-36ed0cf252c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.389062] env[69982]: DEBUG nova.network.neutron [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [{"id": "35894e50-7421-402a-91f6-e5a640cebd85", "address": "fa:16:3e:fe:6f:b0", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap35894e50-74", "ovs_interfaceid": "35894e50-7421-402a-91f6-e5a640cebd85", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.452720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.453356] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1078.456218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 12.119s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.668493] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b4c22c-9ba6-4583-e1b8-5266ad63d95b, 'name': SearchDatastore_Task, 'duration_secs': 0.03205} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.668803] env[69982]: DEBUG oslo_concurrency.lockutils [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.669103] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 834d66a7-4626-4d85-8e6c-db4a8ec39be0/834d66a7-4626-4d85-8e6c-db4a8ec39be0.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1078.669381] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6e4486d2-bbca-43a1-95f3-4f90da31bec6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.679123] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1078.679123] env[69982]: value = "task-3865260" [ 1078.679123] env[69982]: _type = "Task" [ 1078.679123] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.688538] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865260, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.759718] env[69982]: DEBUG nova.compute.manager [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1078.760012] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1078.760924] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4dbffd-dd6d-4389-9d7f-4e4723e86a7c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.774099] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1078.774099] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8069f26-1d8d-4946-96f0-f22e4b155aef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.785912] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1078.785912] env[69982]: value = "task-3865261" [ 1078.785912] env[69982]: _type = "Task" [ 1078.785912] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.796884] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.891713] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-fc46eca6-6a60-477c-98de-a2e1f6c7e88b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.893073] env[69982]: DEBUG nova.objects.instance [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'migration_context' on Instance uuid fc46eca6-6a60-477c-98de-a2e1f6c7e88b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.959389] env[69982]: DEBUG nova.compute.utils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1078.964344] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1078.964535] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.193318] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865260, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.291502] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704f21aa-917b-450d-ac80-1334b141add4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.304157] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865261, 'name': PowerOffVM_Task, 'duration_secs': 0.217935} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.309116] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1079.309783] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1079.309942] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Doing hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1079.310323] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87242159-b572-4ff2-8b36-f4a5b867fe43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.313286] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-daa683f7-30e4-41a0-8d26-76e0d199a618 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.320142] env[69982]: DEBUG oslo_vmware.api [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1079.320142] env[69982]: value = "task-3865263" [ 1079.320142] env[69982]: _type = "Task" [ 1079.320142] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.334561] env[69982]: DEBUG oslo_vmware.api [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865263, 'name': ResetVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.339806] env[69982]: DEBUG nova.policy [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1079.373089] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e8bbdd-937f-4241-8b57-f30fb24beb80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.382085] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd398c6-33e0-412a-8648-85b525a7dc4c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.387110] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1079.387340] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1079.387517] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleting the datastore file [datastore2] 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1079.388216] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-912784d6-e4ee-40a2-ae5c-1f904e64993a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.416585] env[69982]: DEBUG nova.objects.base [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1079.420734] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f27cf66-88c5-4642-91c7-563ca2af3002 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.422907] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a77627-5711-45e5-8a1f-c8f9b8909baa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.425938] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for the task: (returnval){ [ 1079.425938] env[69982]: value = "task-3865264" [ 1079.425938] env[69982]: _type = "Task" [ 1079.425938] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.452961] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cc12fa-8085-4e0f-80ef-51b532e5004a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.457017] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e677182b-eebf-4d9a-845c-938195502aa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.462989] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.465377] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1079.477602] env[69982]: DEBUG oslo_vmware.api [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1079.477602] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d7e1d5-d6ca-275a-155f-dc7579b5e916" [ 1079.477602] env[69982]: _type = "Task" [ 1079.477602] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.478108] env[69982]: DEBUG nova.compute.provider_tree [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1079.489168] env[69982]: DEBUG oslo_vmware.api [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d7e1d5-d6ca-275a-155f-dc7579b5e916, 'name': SearchDatastore_Task, 'duration_secs': 0.007794} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.489579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.689083] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865260, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660304} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.689365] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 834d66a7-4626-4d85-8e6c-db4a8ec39be0/834d66a7-4626-4d85-8e6c-db4a8ec39be0.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1079.689885] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1079.689976] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f1430fdf-2f56-42ff-b55b-c1d0ed451282 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.698794] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1079.698794] env[69982]: value = "task-3865265" [ 1079.698794] env[69982]: _type = "Task" [ 1079.698794] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.708270] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865265, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.832773] env[69982]: DEBUG oslo_vmware.api [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865263, 'name': ResetVM_Task, 'duration_secs': 0.106447} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.833103] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Did hard reboot of VM {{(pid=69982) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1079.833308] env[69982]: DEBUG nova.compute.manager [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.834416] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2be28c-8f5b-4553-92f4-d7fe137e9ae4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.904868] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Successfully created port: 84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1079.938526] env[69982]: DEBUG oslo_vmware.api [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Task: {'id': task-3865264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.344576} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.938785] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1079.939015] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1079.939249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1079.939522] env[69982]: INFO nova.compute.manager [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1079.939942] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1079.940206] env[69982]: DEBUG nova.compute.manager [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1079.940309] env[69982]: DEBUG nova.network.neutron [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1079.986093] env[69982]: DEBUG nova.scheduler.client.report [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1080.211852] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083414} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.212440] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1080.213164] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25677187-3bbf-4213-99fe-c0d4101880b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.244640] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 834d66a7-4626-4d85-8e6c-db4a8ec39be0/834d66a7-4626-4d85-8e6c-db4a8ec39be0.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.245070] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7901fd1-8b46-485d-a217-899a98fa94a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.276440] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1080.276440] env[69982]: value = "task-3865266" [ 1080.276440] env[69982]: _type = "Task" [ 1080.276440] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.287775] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865266, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.357705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4329a4db-456b-4464-a029-82b63d3dc2e4 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.406s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.474476] env[69982]: DEBUG nova.compute.manager [req-6ba57373-7d3c-4f9c-8ad8-0e02333b6a7b req-b02fe2e5-bb9c-417c-a73b-ee5127947875 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Received event network-vif-deleted-7d9d4676-21b3-4951-a9a4-21f80d05edb0 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.478093] env[69982]: INFO nova.compute.manager [req-6ba57373-7d3c-4f9c-8ad8-0e02333b6a7b req-b02fe2e5-bb9c-417c-a73b-ee5127947875 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Neutron deleted interface 7d9d4676-21b3-4951-a9a4-21f80d05edb0; detaching it from the instance and deleting it from the info cache [ 1080.478093] env[69982]: DEBUG nova.network.neutron [req-6ba57373-7d3c-4f9c-8ad8-0e02333b6a7b req-b02fe2e5-bb9c-417c-a73b-ee5127947875 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.492570] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1080.536518] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.536854] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.537276] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.537532] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.537773] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.537970] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.538255] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.538468] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.538720] env[69982]: DEBUG 
nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.538952] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1080.539203] env[69982]: DEBUG nova.virt.hardware [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.540673] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62534aeb-55b5-4f15-9ee3-c2d8135bc08f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.550850] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c561335-2e03-4db1-9a1f-3aeca240a4d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.787477] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865266, 'name': ReconfigVM_Task, 'duration_secs': 0.476258} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.787763] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 834d66a7-4626-4d85-8e6c-db4a8ec39be0/834d66a7-4626-4d85-8e6c-db4a8ec39be0.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.788420] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-370a2c88-81a7-4f51-acc0-d74293452b23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.795872] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1080.795872] env[69982]: value = "task-3865267" [ 1080.795872] env[69982]: _type = "Task" [ 1080.795872] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.804464] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865267, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.909614] env[69982]: DEBUG nova.network.neutron [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.980023] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fa059106-c158-4d28-8564-45dabfa85f33 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.990168] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e376cb68-db91-47c7-8f44-bd081bc7e6f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.004565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.548s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.004988] env[69982]: DEBUG nova.compute.manager [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=69982) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1081.008304] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.946s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.008820] env[69982]: DEBUG nova.objects.instance [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'resources' on Instance uuid 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1081.034441] env[69982]: DEBUG nova.compute.manager [req-6ba57373-7d3c-4f9c-8ad8-0e02333b6a7b req-b02fe2e5-bb9c-417c-a73b-ee5127947875 service nova] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Detach interface failed, port_id=7d9d4676-21b3-4951-a9a4-21f80d05edb0, reason: Instance 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1081.065455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "dffd4365-d2b6-4201-be46-a823399bb2a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.066071] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.067357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.067783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.068160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.071153] env[69982]: INFO nova.compute.manager [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Terminating instance [ 1081.306667] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865267, 'name': Rename_Task, 'duration_secs': 0.169178} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.306973] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.307329] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d42b731-e223-4eb3-b793-d8c375d5e49f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.314690] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1081.314690] env[69982]: value = "task-3865268" [ 1081.314690] env[69982]: _type = "Task" [ 1081.314690] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.324835] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865268, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.413257] env[69982]: INFO nova.compute.manager [-] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Took 1.47 seconds to deallocate network for instance. [ 1081.437724] env[69982]: DEBUG nova.compute.manager [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-vif-plugged-84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.437876] env[69982]: DEBUG oslo_concurrency.lockutils [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.438153] env[69982]: DEBUG oslo_concurrency.lockutils [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.438359] env[69982]: DEBUG oslo_concurrency.lockutils [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.438683] env[69982]: DEBUG nova.compute.manager [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] No waiting events found dispatching network-vif-plugged-84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.440584] env[69982]: WARNING nova.compute.manager [req-d979d8b3-db8a-4b89-88df-d869c4b35b38 req-26a31d92-ff83-448c-91ab-802112f2e8a5 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received unexpected event network-vif-plugged-84b43cbd-49c3-4deb-aac7-06329e863173 for instance with vm_state building and task_state spawning. [ 1081.542339] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Successfully updated port: 84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1081.575904] env[69982]: DEBUG nova.compute.manager [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1081.576173] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.577475] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e691503a-539b-48c8-9283-3a1d01e4fa0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.588061] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.591615] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-316fc515-3f9c-4fb3-8d82-6e781712e95d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.594451] env[69982]: INFO nova.scheduler.client.report [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted allocation for migration ea7d255f-79eb-4118-bb96-acb700b60ec6 [ 1081.604834] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1081.604834] env[69982]: value = "task-3865269" [ 1081.604834] env[69982]: _type = "Task" [ 1081.604834] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.618353] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865269, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.827097] env[69982]: DEBUG oslo_vmware.api [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865268, 'name': PowerOnVM_Task, 'duration_secs': 0.503499} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.832470] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1081.832470] env[69982]: INFO nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Took 7.94 seconds to spawn the instance on the hypervisor. [ 1081.832470] env[69982]: DEBUG nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1081.832470] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e74171-ed2d-4ed2-b265-3ed58a4744a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.906840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3459621a-9a43-4994-a753-d5b5ba127835 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.915813] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65000803-a35a-464e-8d18-c80eba133cf7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.919787] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.953262] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee541b14-ffb6-47bd-92a6-fd8bbe6fab06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.963277] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ca0c832-a704-4c22-98c3-5bd854465509 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.978354] env[69982]: DEBUG nova.compute.provider_tree [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.047365] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.047530] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.047746] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1082.105265] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b032774b-b378-431b-bca2-93ecb83ba601 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 19.185s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.115842] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865269, 'name': PowerOffVM_Task, 'duration_secs': 0.232791} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.116188] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1082.116380] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1082.116547] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ebda213-3e29-4d4b-851d-aa47d32444e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.188108] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1082.188388] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1082.188576] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Deleting the datastore file [datastore1] dffd4365-d2b6-4201-be46-a823399bb2a6 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1082.189184] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef605f54-e4b4-4970-adf4-9275e01232d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.197052] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for the task: (returnval){ [ 1082.197052] env[69982]: value = "task-3865271" [ 1082.197052] env[69982]: _type = "Task" [ 1082.197052] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.209740] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865271, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.354641] env[69982]: INFO nova.compute.manager [None req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Took 29.71 seconds to build instance. [ 1082.482220] env[69982]: DEBUG nova.scheduler.client.report [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1082.581125] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1082.708449] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865271, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.760789] env[69982]: DEBUG nova.network.neutron [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.858029] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-32b7576c-9b88-459c-beb9-1eacd8ffd64d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.221s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.905073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.905073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.987612] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.979s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.990039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.906s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.990291] env[69982]: DEBUG nova.objects.instance [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lazy-loading 'resources' on Instance uuid 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.014295] env[69982]: INFO nova.scheduler.client.report [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocations for instance 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82 [ 1083.071568] env[69982]: DEBUG nova.objects.instance [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.209084] env[69982]: DEBUG oslo_vmware.api [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Task: {'id': task-3865271, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.648781} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.209864] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1083.209864] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1083.209864] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1083.210167] env[69982]: INFO nova.compute.manager [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1083.210167] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.211059] env[69982]: DEBUG nova.compute.manager [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1083.211059] env[69982]: DEBUG nova.network.neutron [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1083.264307] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.264762] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Instance network_info: |[{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.265233] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:0e:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84b43cbd-49c3-4deb-aac7-06329e863173', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.273334] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating folder: Project (a9f274f86bbe43d4b92ac058f100ba0f). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.273675] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33c25233-513a-4cf5-87ed-d5bcccc92281 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.286374] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created folder: Project (a9f274f86bbe43d4b92ac058f100ba0f) in parent group-v767796. [ 1083.286979] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating folder: Instances. Parent ref: group-v768061. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.287114] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1665c693-9108-4ed1-b5bb-809934144307 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.299042] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created folder: Instances in parent group-v768061. [ 1083.299339] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.299555] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.299778] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1acf1ac-6ae8-478c-bea3-f646c860a104 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.320611] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.320611] env[69982]: value = "task-3865274" [ 1083.320611] env[69982]: _type = "Task" [ 1083.320611] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.329498] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865274, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.410535] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1083.517405] env[69982]: DEBUG nova.compute.manager [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-changed-84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.517621] env[69982]: DEBUG nova.compute.manager [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing instance network info cache due to event network-changed-84b43cbd-49c3-4deb-aac7-06329e863173. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1083.517817] env[69982]: DEBUG oslo_concurrency.lockutils [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.517960] env[69982]: DEBUG oslo_concurrency.lockutils [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.518137] env[69982]: DEBUG nova.network.neutron [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing network info cache for port 84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1083.526120] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a1026807-16e3-4dbb-b831-03c85d4165c4 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.530s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.557958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.558261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.558471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.558660] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.558831] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.561245] env[69982]: INFO nova.compute.manager [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Terminating instance [ 1083.576244] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.576421] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.576625] env[69982]: DEBUG nova.network.neutron [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1083.576844] env[69982]: DEBUG nova.objects.instance [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'info_cache' on Instance uuid d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1083.794713] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf186d0-d97b-4dbc-bb67-4c76904f1f4f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.804097] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a703202b-9ccc-4677-b800-187089e0f88f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.838311] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92a91fe-3373-4132-9544-1ea1833ef2de {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.850588] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865274, 'name': CreateVM_Task, 'duration_secs': 0.347132} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.852239] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1137c49c-360e-485f-aa78-ffcf938d9a91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.856261] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.856982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.857173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.857504] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.858148] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15d6e033-bfc0-45e4-ae0f-eeee8c5f418a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.868803] env[69982]: DEBUG nova.compute.provider_tree [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.871579] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1083.871579] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ba603d-3c80-0ab1-8501-3f369b439134" [ 1083.871579] env[69982]: _type = "Task" [ 1083.871579] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.882271] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ba603d-3c80-0ab1-8501-3f369b439134, 'name': SearchDatastore_Task, 'duration_secs': 0.013105} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.882575] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.882845] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.883120] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.883341] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.883670] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.883947] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a66a2ae9-fcaa-4754-a0a4-6eed76fac80f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.893420] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.893591] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.894366] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1556f9cb-a22f-444c-a7c6-25869a8ab80b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.900064] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1083.900064] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259c513-e76f-83e8-0001-e12fd1446131" [ 1083.900064] env[69982]: _type = "Task" [ 1083.900064] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.908709] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259c513-e76f-83e8-0001-e12fd1446131, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.926430] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.976641] env[69982]: DEBUG nova.network.neutron [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.065895] env[69982]: DEBUG nova.compute.manager [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1084.066223] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1084.067340] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d965fd3a-b64c-4a62-8079-c4caaad9a574 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.075960] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1084.078448] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80a8e5d9-cb83-47b8-891c-4ac84578302c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.081571] env[69982]: DEBUG nova.objects.base [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1084.091458] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1084.091458] env[69982]: value = "task-3865275" [ 1084.091458] env[69982]: _type = "Task" [ 1084.091458] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.103373] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865275, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.338833] env[69982]: DEBUG nova.network.neutron [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updated VIF entry in instance network info cache for port 84b43cbd-49c3-4deb-aac7-06329e863173. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1084.339255] env[69982]: DEBUG nova.network.neutron [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.373579] env[69982]: DEBUG nova.scheduler.client.report [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1084.411888] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5259c513-e76f-83e8-0001-e12fd1446131, 'name': SearchDatastore_Task, 'duration_secs': 0.009736} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.412767] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91246921-8693-47af-b1ff-bf532ccf904c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.419504] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1084.419504] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250b492-400b-6c41-0f01-e287f7f09011" [ 1084.419504] env[69982]: _type = "Task" [ 1084.419504] env[69982]: } to complete. 
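Annotation: the instance_info_cache update above carries the full VIF model (port id, MAC, subnets, fixed IPs, OVS binding details). A small helper like the one below can pull the fixed addresses out of that structure; network_info is assumed to be the same list-of-dicts shape shown in the log entry.

def fixed_ips(network_info):
    # Collect every fixed IP across all VIFs and subnets in the cached model.
    addresses = []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip.get("type") == "fixed":
                    addresses.append(ip["address"])
    return addresses

# For the port 84b43cbd-49... entry above this returns ['192.168.128.5'].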
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.428288] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250b492-400b-6c41-0f01-e287f7f09011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.479040] env[69982]: INFO nova.compute.manager [-] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Took 1.27 seconds to deallocate network for instance. [ 1084.602434] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865275, 'name': PowerOffVM_Task, 'duration_secs': 0.216139} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.602721] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1084.602896] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1084.603286] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e53d2c18-fb4b-4608-b0ae-3481b75df99b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.671349] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1084.671585] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1084.671776] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore1] 834d66a7-4626-4d85-8e6c-db4a8ec39be0 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1084.672142] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-615b6c61-2b70-48b5-8611-3860dfb2951f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.681641] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1084.681641] env[69982]: value = "task-3865277" [ 1084.681641] env[69982]: _type = "Task" [ 1084.681641] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.694788] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.842865] env[69982]: DEBUG oslo_concurrency.lockutils [req-8ca094a0-d312-4038-a416-c12543f90108 req-a21c49b3-5ba8-479f-972a-e90e98811155 service nova] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.876535] env[69982]: DEBUG nova.network.neutron [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [{"id": "cc682899-b104-4e53-b80d-49a30d6e0316", "address": "fa:16:3e:93:cf:8e", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc682899-b1", "ovs_interfaceid": "cc682899-b104-4e53-b80d-49a30d6e0316", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.878388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.881767] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.718s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.881899] env[69982]: DEBUG 
nova.objects.instance [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lazy-loading 'resources' on Instance uuid 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.898834] env[69982]: INFO nova.scheduler.client.report [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted allocations for instance 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a [ 1084.930807] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250b492-400b-6c41-0f01-e287f7f09011, 'name': SearchDatastore_Task, 'duration_secs': 0.01037} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.931120] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.931401] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429/cf08cf32-f3d4-494f-a51b-a40616e76429.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.931689] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05a65cf3-0f88-4af4-bae8-51ebb2385225 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.938645] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1084.938645] env[69982]: value = "task-3865278" [ 1084.938645] env[69982]: _type = "Task" [ 1084.938645] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.947274] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865278, 'name': CopyVirtualDisk_Task} progress is 0%. 
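Annotation: the CopyVirtualDisk_Task above copies the cached base image VMDK into the instance's own datastore directory. The source and target paths follow a fixed layout, reproduced in the sketch below; copy_virtual_disk in the comment is a hypothetical wrapper, not the exact Nova helper signature.

def instance_root_disk_paths(datastore, image_id, instance_uuid):
    # Mirror the source/target paths in the CopyVirtualDisk_Task entry above.
    src = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
    dst = "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)
    return src, dst

src, dst = instance_root_disk_paths(
    "datastore2",
    "a4e69d6f-1c15-4f57-92a8-5e81c6be8172",
    "cf08cf32-f3d4-494f-a51b-a40616e76429")
# copy_virtual_disk(session, src, dst) would then submit CopyVirtualDisk_Task.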
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.985208] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.193940] env[69982]: DEBUG oslo_vmware.api [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159487} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.194118] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1085.194311] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1085.194489] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1085.194663] env[69982]: INFO nova.compute.manager [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1085.194911] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
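Annotation: the "Waiting for function ..._deallocate_network_with_retries to return" entry is oslo.service's looping-call machinery retrying a flaky operation until it succeeds. A minimal, generic sketch of that retry shape (not Nova's actual implementation) could look like this:

from oslo_service import loopingcall

def call_with_retries(attempt, interval=2):
    # attempt() should raise on transient failure and return a value on success.
    def _poll():
        try:
            result = attempt()
        except Exception:
            return  # swallow the failure and try again on the next interval
        raise loopingcall.LoopingCallDone(retvalue=result)

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=interval).wait()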
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1085.195146] env[69982]: DEBUG nova.compute.manager [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1085.195269] env[69982]: DEBUG nova.network.neutron [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1085.288533] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.288760] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.382225] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-d21659fd-015d-4f5b-b4b5-f38f550e0f00" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.410425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad985b1d-c12d-4a5c-967c-f5ffd65aace2 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.042s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.463527] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865278, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.573211] env[69982]: DEBUG nova.compute.manager [req-ede54202-28be-4fe8-aca8-06875d0ee61d req-6c7f7ecf-6cf0-4a85-b58e-2bb63b9b6480 service nova] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Received event network-vif-deleted-ba417169-23c9-4d5d-86ad-2ab83634ddb2 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.573211] env[69982]: DEBUG nova.compute.manager [req-ede54202-28be-4fe8-aca8-06875d0ee61d req-6c7f7ecf-6cf0-4a85-b58e-2bb63b9b6480 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Received event network-vif-deleted-c6f21ecb-5489-44f1-8ffa-8c7b630004a3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.573211] env[69982]: INFO nova.compute.manager [req-ede54202-28be-4fe8-aca8-06875d0ee61d req-6c7f7ecf-6cf0-4a85-b58e-2bb63b9b6480 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Neutron deleted interface c6f21ecb-5489-44f1-8ffa-8c7b630004a3; detaching it from the instance and deleting it from the info cache [ 1085.573362] env[69982]: DEBUG nova.network.neutron [req-ede54202-28be-4fe8-aca8-06875d0ee61d req-6c7f7ecf-6cf0-4a85-b58e-2bb63b9b6480 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.695835] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63e6f81-ad44-4a6e-8c98-29ea23fe9009 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.703986] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677b6fb9-781f-47c8-b3c9-0cca51d49898 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.734359] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f753d8-c497-474c-8610-8fe71ee8e493 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.741960] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a175976-67b2-49b5-a365-aaadb867d2d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.757076] env[69982]: DEBUG nova.compute.provider_tree [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.791620] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1085.952394] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865278, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.537221} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.952680] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429/cf08cf32-f3d4-494f-a51b-a40616e76429.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.952953] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.953265] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edf3d20c-d901-4c9b-aafd-3f52c865e8a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.960547] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1085.960547] env[69982]: value = "task-3865279" [ 1085.960547] env[69982]: _type = "Task" [ 1085.960547] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.970412] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865279, 'name': ExtendVirtualDisk_Task} progress is 0%. 
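Annotation: "Extending root virtual disk to 1048576" in the entry above is the flavor's root disk size expressed in KiB (here 1 GiB): after the base image is copied, it is grown to the requested size before the VM is reconfigured. A quick sketch of that size calculation; extend_virtual_disk in the comment is an illustrative helper name.

def root_disk_size_kb(root_gb):
    # Flavor root disk is stored in GiB; the extend call wants KiB.
    return root_gb * 1024 * 1024

assert root_disk_size_kb(1) == 1048576   # matches the value in the log entry
# extend_virtual_disk(session, dst_vmdk, root_disk_size_kb(flavor.root_gb)) would
# then submit ExtendVirtualDisk_Task.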
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.989016] env[69982]: DEBUG nova.network.neutron [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.078681] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f3fe246b-712b-4e25-b775-ee5b94dfd167 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.089050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbbb170d-46e7-4b86-8b9d-e3cbc950f33a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.120420] env[69982]: DEBUG nova.compute.manager [req-ede54202-28be-4fe8-aca8-06875d0ee61d req-6c7f7ecf-6cf0-4a85-b58e-2bb63b9b6480 service nova] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Detach interface failed, port_id=c6f21ecb-5489-44f1-8ffa-8c7b630004a3, reason: Instance 834d66a7-4626-4d85-8e6c-db4a8ec39be0 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1086.261059] env[69982]: DEBUG nova.scheduler.client.report [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.314098] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.388251] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.388578] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be35bbdf-6a5c-4464-acf3-c2c892257097 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.396773] env[69982]: DEBUG oslo_vmware.api [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1086.396773] env[69982]: value = "task-3865280" [ 1086.396773] env[69982]: _type = "Task" [ 1086.396773] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.405056] env[69982]: DEBUG oslo_vmware.api [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.471645] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070919} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.471932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1086.472796] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea17e60-2d1d-4d9a-8317-ed779c5d8251 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.495614] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429/cf08cf32-f3d4-494f-a51b-a40616e76429.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1086.496131] env[69982]: INFO nova.compute.manager [-] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Took 1.30 seconds to deallocate network for instance. [ 1086.496397] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63302a86-77eb-4ac7-a65a-5ee310ab0d36 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.520115] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1086.520115] env[69982]: value = "task-3865281" [ 1086.520115] env[69982]: _type = "Task" [ 1086.520115] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.530342] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865281, 'name': ReconfigVM_Task} progress is 5%. 
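Annotation: the ReconfigVM_Task above attaches the freshly copied and extended VMDK to instance-0000005f as its root disk. Conceptually the reconfigure spec carries one "add device" entry pointing at the datastore path; the dict below is only a simplified illustration of what that payload conveys, not the actual SOAP/pyVmomi types.

def attach_disk_spec(vmdk_path, disk_type="sparse", adapter_type="lsiLogic"):
    # Simplified stand-in for a VM config spec with one deviceChange entry.
    return {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "kind": "VirtualDisk",
                "backing": {"fileName": vmdk_path, "diskType": disk_type},
                "controller": adapter_type,
            },
        }]
    }

spec = attach_disk_spec(
    "[datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429/"
    "cf08cf32-f3d4-494f-a51b-a40616e76429.vmdk")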
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.766284] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.885s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.768753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.523s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.770327] env[69982]: INFO nova.compute.claims [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.785565] env[69982]: INFO nova.scheduler.client.report [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted allocations for instance 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb [ 1086.909339] env[69982]: DEBUG oslo_vmware.api [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865280, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.019131] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.030121] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865281, 'name': ReconfigVM_Task, 'duration_secs': 0.338929} completed successfully. 
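Annotation: the "Claim successful on node domain-c8..." entry above is checked against the inventory reported a little earlier (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400). Placement-style usable capacity is (total - reserved) * allocation_ratio; a quick check with the numbers from the log:

def usable(total, reserved, allocation_ratio):
    # Placement-style effective capacity for one resource class.
    return (total - reserved) * allocation_ratio

inventory = {
    "VCPU": dict(total=48, reserved=0, allocation_ratio=4.0),
    "MEMORY_MB": dict(total=196590, reserved=512, allocation_ratio=1.0),
    "DISK_GB": dict(total=400, reserved=0, allocation_ratio=1.0),
}
print({rc: usable(**v) for rc, v in inventory.items()})
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}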
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.030415] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfigured VM instance instance-0000005f to attach disk [datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429/cf08cf32-f3d4-494f-a51b-a40616e76429.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.031075] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-463d83ae-67b7-419e-bcec-c19fa3612634 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.038481] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1087.038481] env[69982]: value = "task-3865282" [ 1087.038481] env[69982]: _type = "Task" [ 1087.038481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.046923] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865282, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.292595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d78e53c7-4638-4df5-aeea-ca86c118995f tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "0b973aa8-6d25-4de9-8a6b-7bb9f65671fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.830s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.407569] env[69982]: DEBUG oslo_vmware.api [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865280, 'name': PowerOnVM_Task, 'duration_secs': 0.636161} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.407842] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1087.408060] env[69982]: DEBUG nova.compute.manager [None req-6c27aa3a-f310-4778-861c-44f405f4cc2f tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1087.408840] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816536df-3862-4a37-bc3f-713d3f753771 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.548934] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865282, 'name': Rename_Task, 'duration_secs': 0.146594} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.549271] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.549433] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6bf9bd60-7d67-43a9-8159-d1354e9265be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.557225] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1087.557225] env[69982]: value = "task-3865283" [ 1087.557225] env[69982]: _type = "Task" [ 1087.557225] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.565812] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865283, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.069409] env[69982]: DEBUG oslo_vmware.api [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865283, 'name': PowerOnVM_Task, 'duration_secs': 0.482643} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.070785] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.071033] env[69982]: INFO nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Took 7.58 seconds to spawn the instance on the hypervisor. [ 1088.071181] env[69982]: DEBUG nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.071971] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc181a76-7f2f-4db2-8095-093771b3ec55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.075184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7effafd0-a12b-4be9-bab4-910d18d74171 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.084965] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaa5208-520d-493d-b043-cd20869d708c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.124579] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298553d2-a5df-44fb-b802-fbc38a0ef2bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.135100] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b7492e-7e08-4b1c-af83-860d8b577184 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.154363] env[69982]: DEBUG nova.compute.provider_tree [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.529694] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.529944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock 
"d10aaf26-7100-4313-bd57-d2cfefb16e3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.567044] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.567438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.598177] env[69982]: INFO nova.compute.manager [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Took 22.84 seconds to build instance. [ 1088.657780] env[69982]: DEBUG nova.scheduler.client.report [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.865198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.865677] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.865950] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.866242] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.866454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.868874] env[69982]: INFO nova.compute.manager [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Terminating instance [ 1089.032767] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1089.070032] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1089.100277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3d173d8-cc1a-45f0-a6fc-591baf51567e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.348s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.165051] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.165051] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Start building networks asynchronously for instance. 
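Annotation: "Start building networks asynchronously" means the Neutron port allocation is kicked off in the background while the rest of the build (image prep, disk copy) proceeds, and the build only blocks on it once the VIFs are actually needed. Nova does this with eventlet green threads; the sketch below shows the same shape with a standard-library executor, with allocate_ports() and build_guest() as hypothetical stand-ins.

from concurrent.futures import ThreadPoolExecutor

def build_instance(allocate_ports, build_guest):
    with ThreadPoolExecutor(max_workers=1) as pool:
        network_future = pool.submit(allocate_ports)   # 'Allocating IP information in the background'
        build_guest()                                  # disk copy / VM creation continues meanwhile
        network_info = network_future.result()         # block only when the VIFs are needed
    return network_info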
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1089.166401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.752s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.166660] env[69982]: DEBUG nova.objects.instance [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'resources' on Instance uuid 8518f3c8-738d-468a-9f57-de50e4e67108 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.372944] env[69982]: DEBUG nova.compute.manager [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.373289] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.374372] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b00ae0-3a3a-45eb-8cd8-3dfe957e9689 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.384534] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.384885] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2010c398-addd-48a8-8151-5824e8fb1560 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.392311] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1089.392311] env[69982]: value = "task-3865284" [ 1089.392311] env[69982]: _type = "Task" [ 1089.392311] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.402479] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865284, 'name': PowerOffVM_Task} progress is 0%. 
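Annotation: the "Lazy-loading 'resources' on Instance uuid ..." entries show Nova's versioned objects fetching expensive fields (flavor, info_cache, resources) only on first access instead of with the initial DB load. A generic sketch of that lazy-attribute pattern, not the actual nova.objects implementation:

class LazyInstance:
    def __init__(self, uuid, loader):
        self.uuid = uuid
        self._loader = loader     # callable(uuid, field) -> value, e.g. a DB fetch
        self._loaded = {}

    def __getattr__(self, field):
        # Only reached for attributes not already set; load once, then cache.
        if field.startswith("_"):
            raise AttributeError(field)
        if field not in self._loaded:
            self._loaded[field] = self._loader(self.uuid, field)
        return self._loaded[field]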
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.560149] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.591567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.670015] env[69982]: DEBUG nova.compute.utils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1089.674142] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1089.674498] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1089.744734] env[69982]: DEBUG nova.policy [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c3c9e0fda95c4e3cad6be27dd54c22d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'babff47774384a5ca2d938bcc6331aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1089.911219] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865284, 'name': PowerOffVM_Task, 'duration_secs': 0.306688} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.911606] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1089.912263] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1089.912263] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92fba170-5b42-4510-a5fc-44e677ddf723 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.983785] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1089.984102] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1089.984293] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleting the datastore file [datastore1] d21659fd-015d-4f5b-b4b5-f38f550e0f00 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1089.984587] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50b481b9-3526-40f1-be03-28a28b8d4b9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.993730] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1089.993730] env[69982]: value = "task-3865286" [ 1089.993730] env[69982]: _type = "Task" [ 1089.993730] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.004359] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865286, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.035315] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b59ed3-9e87-42eb-bf2a-693ea3a6153f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.044882] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dc09de-9889-4106-aed3-d464fcec96ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.079826] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de0eca1-c403-4f59-8b44-37fd56b886a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.089863] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ce606f-38ce-4e2b-a589-4e63e203db9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.095482] env[69982]: DEBUG nova.compute.manager [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-changed-84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.095703] env[69982]: DEBUG nova.compute.manager [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing instance network info cache due to event network-changed-84b43cbd-49c3-4deb-aac7-06329e863173. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1090.095938] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.096116] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1090.096298] env[69982]: DEBUG nova.network.neutron [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing network info cache for port 84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1090.110279] env[69982]: DEBUG nova.compute.provider_tree [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1090.178250] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1090.255239] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Successfully created port: b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1090.504565] env[69982]: DEBUG oslo_vmware.api [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148274} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.504968] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.505698] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.505698] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.505698] env[69982]: INFO nova.compute.manager [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1090.505877] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.506774] env[69982]: DEBUG nova.compute.manager [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.506774] env[69982]: DEBUG nova.network.neutron [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1090.613910] env[69982]: DEBUG nova.scheduler.client.report [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1090.862759] env[69982]: DEBUG nova.network.neutron [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updated VIF entry in instance network info cache for port 84b43cbd-49c3-4deb-aac7-06329e863173.
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1090.862759] env[69982]: DEBUG nova.network.neutron [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.985826] env[69982]: DEBUG nova.compute.manager [req-af6754bf-2b07-4688-909d-3db4f7f66a97 req-e81d0a32-eec6-4970-8eae-f6c7af3c9048 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Received event network-vif-deleted-cc682899-b104-4e53-b80d-49a30d6e0316 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1090.986116] env[69982]: INFO nova.compute.manager [req-af6754bf-2b07-4688-909d-3db4f7f66a97 req-e81d0a32-eec6-4970-8eae-f6c7af3c9048 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Neutron deleted interface cc682899-b104-4e53-b80d-49a30d6e0316; detaching it from the instance and deleting it from the info cache [ 1090.986267] env[69982]: DEBUG nova.network.neutron [req-af6754bf-2b07-4688-909d-3db4f7f66a97 req-e81d0a32-eec6-4970-8eae-f6c7af3c9048 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.119224] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.953s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.121619] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.730s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.121920] env[69982]: DEBUG nova.objects.instance [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'resources' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.151310] env[69982]: INFO nova.scheduler.client.report [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted allocations for instance 8518f3c8-738d-468a-9f57-de50e4e67108 [ 1091.189848] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1091.219337] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1091.219608] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1091.219764] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1091.219941] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1091.220103] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1091.220255] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 
tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1091.220474] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1091.220633] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1091.220797] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1091.220958] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1091.221145] env[69982]: DEBUG nova.virt.hardware [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1091.222662] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0004562f-57ab-48b4-846f-56e77147677a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.232293] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdb31db-8677-46ff-a620-c2f455929c6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.366982] env[69982]: DEBUG oslo_concurrency.lockutils [req-7fc051c8-cf13-4c5b-a345-acbcba9b6dff req-6a29c3ae-477a-42d7-9c97-6c0c17ea8980 service nova] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.470607] env[69982]: DEBUG nova.network.neutron [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.488290] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c813d734-0245-498a-a9a6-d85f553c54c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.499586] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564d0ffb-96df-44ed-a146-25129ab1d100 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.529726] env[69982]: DEBUG nova.compute.manager [req-af6754bf-2b07-4688-909d-3db4f7f66a97 req-e81d0a32-eec6-4970-8eae-f6c7af3c9048 service nova] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Detach interface failed, port_id=cc682899-b104-4e53-b80d-49a30d6e0316, reason: Instance d21659fd-015d-4f5b-b4b5-f38f550e0f00 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1091.627054] env[69982]: DEBUG nova.objects.instance [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'numa_topology' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.658200] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5e5c5746-d142-4c99-972b-53ce98928606 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "8518f3c8-738d-468a-9f57-de50e4e67108" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 22.924s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.788240] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Successfully updated port: b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1091.974209] env[69982]: INFO nova.compute.manager [-] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Took 1.47 seconds to deallocate network for instance.
[ 1092.116267] env[69982]: DEBUG nova.compute.manager [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Received event network-vif-plugged-b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.116544] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.117389] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.117389] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.117655] env[69982]: DEBUG nova.compute.manager [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] No waiting events found dispatching network-vif-plugged-b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1092.117912] env[69982]: WARNING nova.compute.manager [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Received unexpected event network-vif-plugged-b3a7809c-bd2b-4433-aab2-dc4c413eff31 for instance with vm_state building and task_state spawning. [ 1092.118180] env[69982]: DEBUG nova.compute.manager [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Received event network-changed-b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.118448] env[69982]: DEBUG nova.compute.manager [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Refreshing instance network info cache due to event network-changed-b3a7809c-bd2b-4433-aab2-dc4c413eff31.
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1092.118695] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Acquiring lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.118849] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Acquired lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.119044] env[69982]: DEBUG nova.network.neutron [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Refreshing network info cache for port b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1092.129832] env[69982]: DEBUG nova.objects.base [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Object Instance<ad43c35a-69bc-4c84-8869-cfde6f516b9b> lazy-loaded attributes: resources,numa_topology {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1092.294411] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.412872] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5586fbf1-f24e-49b9-85d2-8bcd794749d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.422647] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b228b4e-dc38-4fd3-9e21-4a079bb3dd3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.456159] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a120aed-ff95-4f0e-9f4a-212f167281be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.466056] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdf3bd1-d968-4e18-afd9-8239624a5d3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.481479] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.482015] env[69982]: DEBUG nova.compute.provider_tree [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed in
ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.660805] env[69982]: DEBUG nova.network.neutron [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1092.761673] env[69982]: DEBUG nova.network.neutron [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.985591] env[69982]: DEBUG nova.scheduler.client.report [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.262219] env[69982]: DEBUG oslo_concurrency.lockutils [req-ccded1d6-6745-4d28-baf5-3a6429a8e755 req-d2ea76b0-8b43-49d9-b5a0-ac3755ec8625 service nova] Releasing lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.262693] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.263017] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1093.491235] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.369s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.493837] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.474s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.494083] env[69982]: DEBUG 
nova.objects.instance [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'resources' on Instance uuid d43e5e7a-577d-4fe9-aff7-9012adfbdb9a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.808092] env[69982]: DEBUG nova.compute.manager [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1093.814353] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1093.988628] env[69982]: DEBUG nova.network.neutron [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [{"id": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "address": "fa:16:3e:21:93:f4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3a7809c-bd", "ovs_interfaceid": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.002051] env[69982]: DEBUG oslo_concurrency.lockutils [None req-158435ce-9cb5-48da-8166-cb768627e3ba tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 40.455s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.002968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 17.776s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [
1094.003171] env[69982]: INFO nova.compute.manager [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Unshelving [ 1094.255375] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8432744a-b63f-416b-9cd0-52a83fc8816a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.263865] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f541778b-4629-480f-87fa-988f06245a0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.294654] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759d1efa-6317-4084-8fe1-dd10fdcff1a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.303076] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0466fb-07d8-48c5-835f-2f88788a1021 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.321868] env[69982]: DEBUG nova.compute.provider_tree [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.333087] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.492060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.492377] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance network_info: |[{"id": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "address": "fa:16:3e:21:93:f4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3a7809c-bd", "ovs_interfaceid": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1094.492818] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:93:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56398cc0-e39f-410f-8036-8c2a6870e26f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b3a7809c-bd2b-4433-aab2-dc4c413eff31', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1094.500632] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1094.500858] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1094.501111] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6466c262-9842-4a1f-a117-5704a9b955a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.523470] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1094.523470] env[69982]: value = "task-3865287" [ 1094.523470] env[69982]: _type = "Task" [ 1094.523470] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.532623] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865287, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.826220] env[69982]: DEBUG nova.scheduler.client.report [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.034219] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865287, 'name': CreateVM_Task, 'duration_secs': 0.299108} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.034382] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1095.035091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.035982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.035982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1095.036885] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.037140] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-437136b1-9162-47dd-9737-2e469577ea9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.042357] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1095.042357] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520fb5ec-1490-c536-cbc7-a1e334fcc3fb" [ 1095.042357] env[69982]: _type = "Task" [ 1095.042357] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.050472] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520fb5ec-1490-c536-cbc7-a1e334fcc3fb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.208894] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.209183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.209404] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.209586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.209756] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.212132] env[69982]: INFO nova.compute.manager [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Terminating instance [ 1095.330970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.333878] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 15.844s {{(pid=69982) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.352052] env[69982]: INFO nova.scheduler.client.report [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted allocations for instance d43e5e7a-577d-4fe9-aff7-9012adfbdb9a [ 1095.553365] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520fb5ec-1490-c536-cbc7-a1e334fcc3fb, 'name': SearchDatastore_Task, 'duration_secs': 0.00981} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.553706] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.553957] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1095.554300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.554423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.554602] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.554864] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-370d8cb1-221a-4d4a-94cd-561b57a7aed2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.563981] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1095.564255] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None 
req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1095.565091] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1126266c-ab1b-426a-9144-fcd55bf24b26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.570335] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1095.570335] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527964bd-5991-b7b6-428f-60f69eedf4ef" [ 1095.570335] env[69982]: _type = "Task" [ 1095.570335] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.580690] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527964bd-5991-b7b6-428f-60f69eedf4ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.716663] env[69982]: DEBUG nova.compute.manager [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.716809] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.717892] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7849ad1-424d-40be-b581-85614e32234e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.727641] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.728547] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc6e78ca-42cf-470d-a514-9694d77c8971 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.735244] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1095.735244] env[69982]: value = "task-3865288" [ 1095.735244] env[69982]: _type = "Task" [ 1095.735244] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.745624] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865288, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.865659] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90981f23-4caf-44c9-bdcf-7327fc4b719a tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "d43e5e7a-577d-4fe9-aff7-9012adfbdb9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.404s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.083035] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527964bd-5991-b7b6-428f-60f69eedf4ef, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.083542] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99d7a2ef-6da6-42f1-86cd-656efe740f12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.091429] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1096.091429] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ac373b-915f-fa8b-af35-9762005dc015" [ 1096.091429] env[69982]: _type = "Task" [ 1096.091429] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.100272] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ac373b-915f-fa8b-af35-9762005dc015, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.124109] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05a5c51-d59b-42a7-915d-3fa0d8396040 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.131750] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d918d36-f21e-48f7-9e9d-488795968d83 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.162532] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ef407d-cb10-424d-ae66-25c11055a2e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.170618] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8181315a-6e60-44e5-b536-fb2a4fda32d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.186090] env[69982]: DEBUG nova.compute.provider_tree [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.245801] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865288, 'name': PowerOffVM_Task, 'duration_secs': 0.191514} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.246079] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1096.246296] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1096.246576] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4164ee99-7aa4-427b-808d-54dab783bc9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.311375] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.311594] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.311779] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleting the datastore file [datastore1] 5bbc7b58-3e8e-495f-911a-072d282e48a9 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.312075] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-994fdcda-5892-474a-b4fe-f14ecc775da7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.319472] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1096.319472] env[69982]: value = "task-3865290" [ 1096.319472] env[69982]: _type = "Task" [ 1096.319472] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.328750] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.349482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "cd839916-6daf-4b31-941d-6305a585bfaa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.349739] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.349976] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.350210] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.350400] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.352745] env[69982]: INFO nova.compute.manager [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Terminating instance [ 1096.602430] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ac373b-915f-fa8b-af35-9762005dc015, 'name': SearchDatastore_Task, 'duration_secs': 0.010622} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.602646] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.602907] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1096.603192] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c5c518f-3487-483c-8fd7-4158c61442c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.609838] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1096.609838] env[69982]: value = "task-3865291" [ 1096.609838] env[69982]: _type = "Task" [ 1096.609838] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.617821] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865291, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.689710] env[69982]: DEBUG nova.scheduler.client.report [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.832504] env[69982]: DEBUG oslo_vmware.api [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142568} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.832672] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.832731] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.832917] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.833142] env[69982]: INFO nova.compute.manager [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1096.833420] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.833644] env[69982]: DEBUG nova.compute.manager [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.833736] env[69982]: DEBUG nova.network.neutron [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1096.857346] env[69982]: DEBUG nova.compute.manager [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1096.857346] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1096.859112] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96575c0f-2537-44a4-a7fd-1446a85cfe8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.869179] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1096.871073] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d57b40e5-a9e5-4bf5-a36f-100a3dd086d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.878467] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1096.878467] env[69982]: value = "task-3865292" [ 1096.878467] env[69982]: _type = "Task" [ 1096.878467] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.888498] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865292, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.126098] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482419} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.126098] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1097.126098] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1097.126927] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d2e48a8-fd07-44a6-824f-36585084a640 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.135488] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1097.135488] env[69982]: value = "task-3865293" [ 1097.135488] env[69982]: _type = "Task" [ 1097.135488] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.149671] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865293, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.388455] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865292, 'name': PowerOffVM_Task, 'duration_secs': 0.278141} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.388766] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1097.389018] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1097.389165] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c92352e-f054-400f-aa48-ed7dfd87df3a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.460307] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1097.460904] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1097.460904] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleting the datastore file [datastore2] cd839916-6daf-4b31-941d-6305a585bfaa {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1097.461066] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f87cd368-a2a4-45ee-8ee3-117071541c95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.469159] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for the task: (returnval){ [ 1097.469159] env[69982]: value = "task-3865295" [ 1097.469159] env[69982]: _type = "Task" [ 1097.469159] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.484177] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865295, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.503022] env[69982]: DEBUG nova.compute.manager [req-82a82eb1-1945-4320-87c0-14999c87a465 req-c1a9fb40-89b0-40f5-a29b-1075635fcf43 service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Received event network-vif-deleted-53fdfe60-22a6-4234-b170-b003d2f42494 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1097.503022] env[69982]: INFO nova.compute.manager [req-82a82eb1-1945-4320-87c0-14999c87a465 req-c1a9fb40-89b0-40f5-a29b-1075635fcf43 service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Neutron deleted interface 53fdfe60-22a6-4234-b170-b003d2f42494; detaching it from the instance and deleting it from the info cache [ 1097.503022] env[69982]: DEBUG nova.network.neutron [req-82a82eb1-1945-4320-87c0-14999c87a465 req-c1a9fb40-89b0-40f5-a29b-1075635fcf43 service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.645682] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065849} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.645912] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1097.646674] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe15fac0-1adb-4532-ae32-c7bd3333764f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.678286] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.678752] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.678963] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a95c1a32-8b5f-49de-80df-3e8286f35da4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.693392] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1097.701032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.367s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.705376] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.786s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.705618] env[69982]: DEBUG nova.objects.instance [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lazy-loading 'resources' on Instance uuid 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1097.706850] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1097.706850] env[69982]: value = "task-3865296" [ 1097.706850] env[69982]: _type = "Task" [ 1097.706850] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.719359] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865296, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.981896] env[69982]: DEBUG oslo_vmware.api [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Task: {'id': task-3865295, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162121} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.982263] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.982263] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.982418] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.982596] env[69982]: INFO nova.compute.manager [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1097.982882] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1097.983139] env[69982]: DEBUG nova.compute.manager [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1097.983236] env[69982]: DEBUG nova.network.neutron [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1097.985233] env[69982]: DEBUG nova.network.neutron [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.005865] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2bf78cf4-57c5-4286-90ba-b9ca5c32d165 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.020204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54de39e-335a-4721-a188-b296f6fe3603 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.065426] env[69982]: DEBUG nova.compute.manager [req-82a82eb1-1945-4320-87c0-14999c87a465 req-c1a9fb40-89b0-40f5-a29b-1075635fcf43 service nova] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Detach interface failed, port_id=53fdfe60-22a6-4234-b170-b003d2f42494, reason: Instance 5bbc7b58-3e8e-495f-911a-072d282e48a9 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.204780] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.205389] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1098.205389] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.225917] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865296, 'name': ReconfigVM_Task, 'duration_secs': 0.311164} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.225917] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.226898] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46ff99c6-4185-484b-b6ef-4ef80483c87a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.234508] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1098.234508] env[69982]: value = "task-3865297" [ 1098.234508] env[69982]: _type = "Task" [ 1098.234508] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.249589] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865297, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.288746] env[69982]: INFO nova.scheduler.client.report [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocation for migration 0520604d-a406-402f-9686-aee9f2ea548e [ 1098.488467] env[69982]: INFO nova.compute.manager [-] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Took 1.65 seconds to deallocate network for instance. 
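The teardown recorded above follows oslo.vmware's invoke-then-poll pattern: each vSphere call that is asynchronous on the vCenter side (PowerOffVM_Task, DeleteDatastoreFile_Task) hands back a task reference which the driver polls until completion, which is what the repeated `_poll_task ... progress is 0%` and `... completed successfully` pairs at api.py:434/444 record; UnregisterVM returns no task, so it appears only once. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; the function, its arguments, and the error handling here are illustrative, not Nova's actual vmops code:

```python
from oslo_vmware import exceptions as vexc


def destroy_vm(session, vm_ref, file_manager, datacenter_ref, ds_path):
    """Illustrative power-off / unregister / delete sequence.

    `session` is assumed to be an established
    oslo_vmware.api.VMwareAPISession; wait_for_task() blocks while its
    internal poll loop logs the progress/completion lines seen above.
    """
    # PowerOffVM_Task is asynchronous: it returns a task moref to poll.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

    # UnregisterVM is synchronous, so there is no task to wait on.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Delete the instance directory from the datastore.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path,
                              datacenter=datacenter_ref)
    try:
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # Already gone; nothing left to clean up.
        pass
```

While wait_for_task() blocks, its internal polling loop is what emits the progress and "completed successfully" lines interleaved through the log.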
[ 1098.538964] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7c5779-9689-4cf5-a345-962d1fa0eeeb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.548032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df18a52-4362-420a-b5cc-4e1415db8832 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.580829] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dda7835-440e-42f6-8d6b-ab3647dc341d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.589181] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915f18a2-0925-41ba-93bf-a59c2f3a4b00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.605423] env[69982]: DEBUG nova.compute.provider_tree [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1098.707910] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.744997] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865297, 'name': Rename_Task, 'duration_secs': 0.144476} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.745183] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.745442] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fb13072-ccd2-421f-b1e6-307026223504 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.752214] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1098.752214] env[69982]: value = "task-3865298" [ 1098.752214] env[69982]: _type = "Task" [ 1098.752214] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.760323] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865298, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.797874] env[69982]: DEBUG oslo_concurrency.lockutils [None req-330468e7-e11f-4497-94ba-7d264364a91f tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 22.372s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.799158] env[69982]: DEBUG nova.network.neutron [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.819475] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.819701] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.819960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.820191] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.820428] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.823541] env[69982]: INFO nova.compute.manager [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Terminating instance [ 1098.995233] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 
tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.108063] env[69982]: DEBUG nova.scheduler.client.report [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.263362] env[69982]: DEBUG oslo_vmware.api [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865298, 'name': PowerOnVM_Task, 'duration_secs': 0.466955} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.263670] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.263968] env[69982]: INFO nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Took 8.07 seconds to spawn the instance on the hypervisor. [ 1099.264078] env[69982]: DEBUG nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1099.264898] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cf1e516-51dc-4355-9ff9-898da1de9a22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.305325] env[69982]: INFO nova.compute.manager [-] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Took 1.32 seconds to deallocate network for instance. [ 1099.332457] env[69982]: DEBUG nova.compute.manager [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1099.332457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.332775] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4e8944-5138-4fc1-9c36-c8e6a1d0c01c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.341645] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.341767] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4516c326-b9eb-4a6a-8428-d838561a5b11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.350811] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1099.350811] env[69982]: value = "task-3865299" [ 1099.350811] env[69982]: _type = "Task" [ 1099.350811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.362182] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.463698] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.463698] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.549899] env[69982]: DEBUG nova.compute.manager [req-238f9580-5ae9-4b95-813e-f8791b8ae2ff req-31debd92-a6bc-4659-8763-83b6c9d4fc8f service nova] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Received event network-vif-deleted-b5a29604-4742-41d4-b219-8bbd59089c94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1099.613515] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.908s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.616052] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.690s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.617505] env[69982]: INFO nova.compute.claims [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1099.636336] env[69982]: INFO nova.scheduler.client.report [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Deleted allocations for instance 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3 [ 1099.783937] env[69982]: INFO nova.compute.manager [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Took 27.56 seconds to build instance. 
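Running alongside the spawn above, the resource-tracker traffic (`Acquiring lock "compute_resources"`, `acquired ... waited 15.786s`, `"released" ... held 1.908s`) all comes from oslo.concurrency's synchronized decorator serializing claims, usage updates, and move-claim drops behind a single named lock; the waited/held figures show how long each request queued for the lock and then held it. A minimal sketch of that pattern using oslo_concurrency.lockutils directly; the TrackerSketch class and the values are illustrative (the UUID is borrowed from the claim line above), not Nova's real ResourceTracker, which layers its own helpers on top:

```python
from oslo_concurrency import lockutils

# With DEBUG logging enabled, the synchronized decorator's inner wrapper
# emits the same lines seen above: 'Acquiring lock ... by ...',
# 'Lock ... acquired ... :: waited Ns' and '... "released" ... :: held Ns'.
COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


class TrackerSketch(object):
    """Illustrative stand-in for the resource tracker's serialization."""

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Everything that touches the usage accounting runs under the
        # same named lock, so concurrent claims and updates queue here.
        print('claim %s: %d vCPU, %d MB' % (instance_uuid, vcpus, memory_mb))

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_usage(self, instance_uuid):
        print('update usage for %s' % instance_uuid)


if __name__ == '__main__':
    tracker = TrackerSketch()
    tracker.instance_claim('6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a', 1, 512)
    tracker.update_usage('6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a')
```

Because the lock is shared, a slow accounting pass for one instance (for example the 1.837s and 2.367s holds earlier in this stretch) directly inflates the "waited" time reported by the next caller.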
[ 1099.813252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.862961] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865299, 'name': PowerOffVM_Task, 'duration_secs': 0.307813} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.863265] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1099.863744] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1099.864059] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5acb7509-0988-4a0c-872d-79a1f5db78f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.939055] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1099.939055] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1099.939055] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore2] fc46eca6-6a60-477c-98de-a2e1f6c7e88b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1099.939892] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e6ddc03-601b-4b96-8034-4e26082a0ced {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.948119] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1099.948119] env[69982]: value = "task-3865301" [ 1099.948119] env[69982]: _type = "Task" [ 
1099.948119] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.958335] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865301, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.965947] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1100.145452] env[69982]: DEBUG oslo_concurrency.lockutils [None req-84248884-13d9-4e0e-94d4-316f11dcbf42 tempest-ImagesTestJSON-532780232 tempest-ImagesTestJSON-532780232-project-member] Lock "68c212df-c7a0-45c5-a00c-f94c6a9a9bb3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.897s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.285754] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bdf60f04-6b23-406d-9abf-88d862fde754 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.069s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.458532] env[69982]: DEBUG oslo_vmware.api [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480581} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.458788] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1100.459129] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1100.459320] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1100.459497] env[69982]: INFO nova.compute.manager [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1100.459742] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.459938] env[69982]: DEBUG nova.compute.manager [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1100.460052] env[69982]: DEBUG nova.network.neutron [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1100.484529] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.867607] env[69982]: DEBUG nova.compute.manager [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1100.960261] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52217cb7-d063-45bc-9754-f07682698cb1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.975359] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42f2ec37-74a3-4fee-8465-7d5006b45685 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.010407] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19629c9d-3e13-4489-80c0-ba01a2e19f8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.019342] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1dff33b-9d4a-4c70-abae-a2c587887fd4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.037409] env[69982]: DEBUG nova.compute.provider_tree [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1101.245029] env[69982]: DEBUG nova.network.neutron [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1101.388249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.541325] env[69982]: DEBUG nova.scheduler.client.report [None 
req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.578240] env[69982]: DEBUG nova.compute.manager [req-aaf78118-f6cb-447d-abad-0d156a40c888 req-53883d5d-4ac2-441b-a593-c4519fc79f6d service nova] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Received event network-vif-deleted-35894e50-7421-402a-91f6-e5a640cebd85 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1101.754356] env[69982]: INFO nova.compute.manager [-] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Took 1.29 seconds to deallocate network for instance. [ 1102.047048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.047635] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1102.050710] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.066s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.050917] env[69982]: DEBUG nova.objects.instance [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lazy-loading 'resources' on Instance uuid dffd4365-d2b6-4201-be46-a823399bb2a6 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.262028] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.554031] env[69982]: DEBUG nova.compute.utils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1102.558299] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1102.558502] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1102.624463] env[69982]: DEBUG nova.policy [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3eef7e0eac494f6187b7bd324a63be17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfd55ee4c33142449b7f61928fba77d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1102.831533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdb6092-81d7-446a-9939-0416647c946d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.840021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73400f8-5acc-494e-bbbf-994a37fcfda9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.871444] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ebe459-34df-4189-89fe-54025a2c2b69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.879703] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d79f2ab-c831-4c13-986d-114e59ff28c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.895734] env[69982]: DEBUG nova.compute.provider_tree [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1102.997080] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Successfully created port: fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1103.059353] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1103.402565] env[69982]: DEBUG nova.scheduler.client.report [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.907482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.910107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.596s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.911722] env[69982]: INFO nova.compute.claims [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1103.937089] env[69982]: INFO nova.scheduler.client.report [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Deleted allocations for instance dffd4365-d2b6-4201-be46-a823399bb2a6 [ 1104.069071] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1104.098251] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1104.098524] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1104.098683] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1104.098893] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1104.099059] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1104.099211] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1104.099416] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1104.099577] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1104.099744] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1104.099904] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1104.100094] env[69982]: DEBUG nova.virt.hardware [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1104.101102] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6dabd82-2f2a-45ae-8070-e9ce181d2090 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.110094] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6481658d-6354-41c7-a972-913bf7c10fd7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.445437] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eb3cbdc4-0b75-4507-9069-b3766d11f217 tempest-InstanceActionsTestJSON-674041303 tempest-InstanceActionsTestJSON-674041303-project-member] Lock "dffd4365-d2b6-4201-be46-a823399bb2a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.379s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.592651] env[69982]: DEBUG nova.compute.manager [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Received event network-vif-plugged-fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.593031] env[69982]: DEBUG oslo_concurrency.lockutils [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] Acquiring lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.593171] env[69982]: DEBUG oslo_concurrency.lockutils [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.593259] env[69982]: DEBUG oslo_concurrency.lockutils [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.593480] env[69982]: DEBUG nova.compute.manager [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] No waiting events found dispatching network-vif-plugged-fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1104.593664] env[69982]: WARNING nova.compute.manager [req-c63f465a-ce60-4b1e-96f8-7699e738dff4 req-c01b4ff8-50ab-4cb5-832d-562a86a3c865 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Received unexpected event network-vif-plugged-fe6324a8-865a-45fc-baef-1309b21878cd for instance with vm_state building and task_state spawning. [ 1104.700607] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Successfully updated port: fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1105.203725] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1105.203880] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1105.204843] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1105.212579] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea62794-2ad5-4d8b-8b13-14899e1d5be6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.221862] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbba9e5b-94ba-4e81-944b-28a71f897561 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.254701] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0471040-2c24-450d-b3ee-e5883324c15b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.263725] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60045466-2ff1-481d-8144-a88651138471 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.280397] env[69982]: DEBUG nova.compute.provider_tree [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.746286] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1105.783988] env[69982]: DEBUG nova.scheduler.client.report [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1105.918492] env[69982]: DEBUG nova.network.neutron [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Updating instance_info_cache with network_info: [{"id": "fe6324a8-865a-45fc-baef-1309b21878cd", "address": "fa:16:3e:a5:ee:38", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe6324a8-86", "ovs_interfaceid": "fe6324a8-865a-45fc-baef-1309b21878cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.289621] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.290216] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1106.294116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.275s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.294371] env[69982]: DEBUG nova.objects.instance [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid 834d66a7-4626-4d85-8e6c-db4a8ec39be0 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1106.423592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.424124] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Instance network_info: |[{"id": "fe6324a8-865a-45fc-baef-1309b21878cd", "address": "fa:16:3e:a5:ee:38", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe6324a8-86", "ovs_interfaceid": "fe6324a8-865a-45fc-baef-1309b21878cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1106.424584] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:ee:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe6324a8-865a-45fc-baef-1309b21878cd', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1106.432733] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Creating folder: Project (cfd55ee4c33142449b7f61928fba77d7). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1106.433050] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92535c65-dae4-4af8-9a3b-bd021dbac22f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.444778] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Created folder: Project (cfd55ee4c33142449b7f61928fba77d7) in parent group-v767796. [ 1106.445029] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Creating folder: Instances. Parent ref: group-v768065. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1106.445308] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c5c3de1-cd74-49c9-aed9-f86b1ad136f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.457999] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Created folder: Instances in parent group-v768065. [ 1106.457999] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.458156] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1106.458326] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9b857e5-dcf1-4afb-ae6c-4a4d3751160a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.480109] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1106.480109] env[69982]: value = "task-3865304" [ 1106.480109] env[69982]: _type = "Task" [ 1106.480109] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.488789] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865304, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.720479] env[69982]: DEBUG nova.compute.manager [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Received event network-changed-fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.720479] env[69982]: DEBUG nova.compute.manager [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Refreshing instance network info cache due to event network-changed-fe6324a8-865a-45fc-baef-1309b21878cd. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1106.720681] env[69982]: DEBUG oslo_concurrency.lockutils [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] Acquiring lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.720794] env[69982]: DEBUG oslo_concurrency.lockutils [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] Acquired lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.720954] env[69982]: DEBUG nova.network.neutron [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Refreshing network info cache for port fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1106.797768] env[69982]: DEBUG nova.compute.utils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1106.799605] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1106.799792] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1106.861451] env[69982]: DEBUG nova.policy [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c84472005ef43d99658fa6f5cf59bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07f7b975ecb449a290e2ae6582e07016', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1106.997872] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865304, 'name': CreateVM_Task, 'duration_secs': 0.309861} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.998160] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1107.000178] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.000498] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.000979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1107.001374] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ad09f8f-9dd8-4472-a743-d8955515206a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.012584] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1107.012584] env[69982]: value = 
"session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e3e5a6-21b4-dacf-1e10-50b5b604f2be" [ 1107.012584] env[69982]: _type = "Task" [ 1107.012584] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.022905] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e3e5a6-21b4-dacf-1e10-50b5b604f2be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.125302] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca94fb8-ef61-4d37-8cb4-fcd1af5cda9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.133832] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a9e265-1f06-4a91-a724-d6186d836fa3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.170721] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2661176b-3245-446c-a5de-673877625b7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.179606] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de35c2de-274b-48a2-bafc-5168f98b3802 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.194742] env[69982]: DEBUG nova.compute.provider_tree [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.227126] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Successfully created port: 9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.309631] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1107.375353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "28518353-0bff-460f-8384-f0376280917d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1107.375353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1107.461023] env[69982]: DEBUG nova.network.neutron [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Updated VIF entry in instance network info cache for port fe6324a8-865a-45fc-baef-1309b21878cd. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1107.461465] env[69982]: DEBUG nova.network.neutron [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Updating instance_info_cache with network_info: [{"id": "fe6324a8-865a-45fc-baef-1309b21878cd", "address": "fa:16:3e:a5:ee:38", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.110", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe6324a8-86", "ovs_interfaceid": "fe6324a8-865a-45fc-baef-1309b21878cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.524299] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e3e5a6-21b4-dacf-1e10-50b5b604f2be, 'name': SearchDatastore_Task, 'duration_secs': 0.026679} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.524603] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1107.524840] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1107.525130] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.525285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.525502] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1107.525799] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59b24c14-2ee2-4508-8b2c-cc3e7c9016bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.535260] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1107.535446] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1107.536200] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d749c5e3-ba02-4453-a962-5aa04d6c225c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.541926] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1107.541926] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5239db4b-c633-2dda-0870-921e75916ac7" [ 1107.541926] env[69982]: _type = "Task" [ 1107.541926] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.552499] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5239db4b-c633-2dda-0870-921e75916ac7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.698017] env[69982]: DEBUG nova.scheduler.client.report [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1107.877629] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1107.963986] env[69982]: DEBUG oslo_concurrency.lockutils [req-a3db97ff-b2cb-409e-b014-4b138dd294d0 req-1c55122b-173a-45b8-a936-48e2f7ab0216 service nova] Releasing lock "refresh_cache-6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.052724] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5239db4b-c633-2dda-0870-921e75916ac7, 'name': SearchDatastore_Task, 'duration_secs': 0.009973} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.053550] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43654881-de50-472f-b865-f0df298367a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.059549] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1108.059549] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3b9b2-88c9-d402-36cb-95ed1ca2bbb9" [ 1108.059549] env[69982]: _type = "Task" [ 1108.059549] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.068211] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3b9b2-88c9-d402-36cb-95ed1ca2bbb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.204047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.910s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.206451] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.647s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.208690] env[69982]: INFO nova.compute.claims [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.228664] env[69982]: INFO nova.scheduler.client.report [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance 834d66a7-4626-4d85-8e6c-db4a8ec39be0 [ 1108.324390] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1108.351606] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1108.351857] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.352026] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1108.352218] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.352364] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1108.352586] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1108.352807] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1108.352967] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1108.353405] env[69982]: DEBUG 
nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1108.353614] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1108.353797] env[69982]: DEBUG nova.virt.hardware [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1108.354688] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d505c0-5fed-4bb5-bf60-829fed1ae60b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.363093] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366db79c-b66f-49db-a803-3719bff2f423 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.397105] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.570590] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a3b9b2-88c9-d402-36cb-95ed1ca2bbb9, 'name': SearchDatastore_Task, 'duration_secs': 0.0115} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.571068] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.571242] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a/6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1108.571401] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87307aff-f352-4322-9357-90a531647388 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.578560] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1108.578560] env[69982]: value = "task-3865305" [ 1108.578560] env[69982]: _type = "Task" [ 1108.578560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.586701] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865305, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.740158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b4548ebb-41d7-4bce-b358-7fcaac677e9f tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "834d66a7-4626-4d85-8e6c-db4a8ec39be0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.181s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.802841] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Successfully updated port: 9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1108.865221] env[69982]: DEBUG nova.compute.manager [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Received event network-vif-plugged-9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.865999] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] Acquiring lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.866281] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.866853] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.867355] env[69982]: DEBUG nova.compute.manager [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] No waiting events found dispatching network-vif-plugged-9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1108.867719] env[69982]: WARNING nova.compute.manager [req-5bca18ba-3f5a-41fe-950b-ef8b367e4b27 req-be9623f1-e46a-4552-aaff-d72b48cad5c1 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Received unexpected event network-vif-plugged-9292dad6-1ffe-4506-ba58-fb92f9a98323 for instance with vm_state building and task_state spawning. 
[ 1108.894048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.894528] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.894884] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.896136] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.896136] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.898807] env[69982]: INFO nova.compute.manager [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Terminating instance [ 1109.091691] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865305, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.304802] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.304983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.305154] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1109.405225] env[69982]: DEBUG nova.compute.manager [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1109.405450] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.406497] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f396aca5-e03e-4de3-8ed4-82618869c3d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.414648] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.417243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98454a6f-ea1a-4dbd-8faf-5cb58a376a05 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.424371] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1109.424371] env[69982]: value = "task-3865306" [ 1109.424371] env[69982]: _type = "Task" [ 1109.424371] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.434641] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865306, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.482162] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b833fe-6b6c-4e2e-aafd-6c247d71017f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.491026] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64202fe-f2f4-4a04-8988-ee54d49f588e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.522533] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c731cd8d-b103-4be9-aa7d-e927e3b08587 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.530793] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a29138-d5af-4478-b153-030bfd3455ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.546488] env[69982]: DEBUG nova.compute.provider_tree [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1109.595367] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865305, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540969} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.596014] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a/6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1109.596465] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1109.596945] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf6a1dd2-af31-43f3-b715-82945362a175 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.606124] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1109.606124] env[69982]: value = "task-3865307" [ 1109.606124] env[69982]: _type = "Task" [ 1109.606124] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.616324] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865307, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.837146] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1109.935169] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865306, 'name': PowerOffVM_Task, 'duration_secs': 0.262754} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.935499] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.935499] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1109.935648] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51d7901a-3649-41dd-a9ed-b545812d862e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.004286] env[69982]: DEBUG nova.network.neutron [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updating instance_info_cache with network_info: [{"id": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "address": "fa:16:3e:7c:ea:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9292dad6-1f", "ovs_interfaceid": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.028865] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1110.029109] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1110.029345] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting 
the datastore file [datastore2] 9c0d0f4f-9e88-4e67-99d9-d957652587cd {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1110.029625] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26c292f2-e387-44d8-829b-1c5cbc3416b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.037591] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1110.037591] env[69982]: value = "task-3865309" [ 1110.037591] env[69982]: _type = "Task" [ 1110.037591] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.045976] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865309, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.049180] env[69982]: DEBUG nova.scheduler.client.report [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.116573] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865307, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107218} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.117060] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.117926] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256df656-bb3b-48f7-bb8a-16c113819f5b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.141091] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a/6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.141474] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e5d2985-8630-4e8a-80de-e9a83dca2c81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.162183] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1110.162183] env[69982]: value = "task-3865310" [ 1110.162183] env[69982]: _type = "Task" [ 1110.162183] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.170939] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865310, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.506720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.507219] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Instance network_info: |[{"id": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "address": "fa:16:3e:7c:ea:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9292dad6-1f", "ovs_interfaceid": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1110.507792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:ea:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9292dad6-1ffe-4506-ba58-fb92f9a98323', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.518968] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.519234] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.519477] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ca6cb23f-2ef5-409d-8b19-47df245b25de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.539811] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.539811] env[69982]: value = "task-3865311" [ 1110.539811] env[69982]: _type = "Task" [ 1110.539811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.550888] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865311, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.554428] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.554934] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.557470] env[69982]: DEBUG oslo_vmware.api [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865309, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149347} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.557750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.966s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.559148] env[69982]: INFO nova.compute.claims [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1110.561572] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1110.561762] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1110.561935] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1110.562115] env[69982]: INFO nova.compute.manager [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1110.562350] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.562882] env[69982]: DEBUG nova.compute.manager [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1110.562992] env[69982]: DEBUG nova.network.neutron [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1110.672184] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865310, 'name': ReconfigVM_Task, 'duration_secs': 0.315359} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.672540] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a/6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1110.673221] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd4d306a-bfde-4174-988e-daa9c3b287db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.681057] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1110.681057] env[69982]: value = "task-3865312" [ 1110.681057] env[69982]: _type = "Task" [ 1110.681057] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.689909] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865312, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.898380] env[69982]: DEBUG nova.compute.manager [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Received event network-changed-9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.898552] env[69982]: DEBUG nova.compute.manager [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Refreshing instance network info cache due to event network-changed-9292dad6-1ffe-4506-ba58-fb92f9a98323. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1110.898768] env[69982]: DEBUG oslo_concurrency.lockutils [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] Acquiring lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.898912] env[69982]: DEBUG oslo_concurrency.lockutils [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] Acquired lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.899077] env[69982]: DEBUG nova.network.neutron [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Refreshing network info cache for port 9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1111.053891] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865311, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.064579] env[69982]: DEBUG nova.compute.utils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.068505] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.068505] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.116188] env[69982]: DEBUG nova.policy [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbe1397ab59c4dacbfd8418f1b2eaa70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf2351d172d94606a82ec7e5eabc6faa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.191663] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865312, 'name': Rename_Task, 'duration_secs': 0.3159} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.192273] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.192894] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5971032d-028e-4991-a934-f7cbe0926b4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.202541] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1111.202541] env[69982]: value = "task-3865313" [ 1111.202541] env[69982]: _type = "Task" [ 1111.202541] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.213501] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865313, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.316681] env[69982]: DEBUG nova.network.neutron [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.553973] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865311, 'name': CreateVM_Task, 'duration_secs': 0.723883} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.554236] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.555009] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.555143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.555481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1111.555851] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbebc0ca-f189-4e9a-843e-308e75ff8d65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.561208] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1111.561208] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5266c6bd-043c-e953-fe0d-52a3bac80675" [ 1111.561208] env[69982]: _type = "Task" [ 1111.561208] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.568708] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1111.584464] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5266c6bd-043c-e953-fe0d-52a3bac80675, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.637343] env[69982]: DEBUG nova.network.neutron [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updated VIF entry in instance network info cache for port 9292dad6-1ffe-4506-ba58-fb92f9a98323. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1111.637819] env[69982]: DEBUG nova.network.neutron [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updating instance_info_cache with network_info: [{"id": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "address": "fa:16:3e:7c:ea:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9292dad6-1f", "ovs_interfaceid": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.700536] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Successfully created port: bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1111.716274] env[69982]: DEBUG oslo_vmware.api [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865313, 'name': PowerOnVM_Task, 'duration_secs': 0.499767} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.716330] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1111.716558] env[69982]: INFO nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Took 7.65 seconds to spawn the instance on the hypervisor. 
[ 1111.716729] env[69982]: DEBUG nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1111.717756] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf450ca-06a0-4926-95a6-fe69c46f271e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.818916] env[69982]: INFO nova.compute.manager [-] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Took 1.26 seconds to deallocate network for instance. [ 1111.908582] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7994d9b-83e8-4ec0-b2c2-d3df5b9f209d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.918079] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37721de5-5ed5-4567-95e6-99beb7ba3e8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.951196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdfd9b4-8509-44a4-a10b-c1b1ac76f271 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.959522] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0091658d-3297-4207-913d-7a7d7f187f38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.973685] env[69982]: DEBUG nova.compute.provider_tree [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.075348] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5266c6bd-043c-e953-fe0d-52a3bac80675, 'name': SearchDatastore_Task, 'duration_secs': 0.01068} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.075697] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.076012] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1112.076283] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.076432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.076629] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.076900] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea875adc-47f7-428f-bde6-6bb59a93c3f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.089768] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.089962] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1112.091085] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b329eed-680d-4a45-b57f-dfd94b8a9c9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.098260] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1112.098260] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527ce5c1-cc3c-a024-7811-81deee37bf09" [ 1112.098260] env[69982]: _type = "Task" [ 1112.098260] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.107563] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527ce5c1-cc3c-a024-7811-81deee37bf09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.141420] env[69982]: DEBUG oslo_concurrency.lockutils [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] Releasing lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.141420] env[69982]: DEBUG nova.compute.manager [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Received event network-vif-deleted-ee39c037-aaca-4a17-aa77-f50203a74e94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1112.141752] env[69982]: INFO nova.compute.manager [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Neutron deleted interface ee39c037-aaca-4a17-aa77-f50203a74e94; detaching it from the instance and deleting it from the info cache [ 1112.141752] env[69982]: DEBUG nova.network.neutron [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.237482] env[69982]: INFO nova.compute.manager [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Took 28.32 seconds to build instance. 
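The lock traffic above ("Acquiring"/"Acquired"/"Releasing" on "[datastore1] devstack-image-cache_base/<image id>" and on the corresponding .vmdk path) is how concurrent builds are kept from populating the same image-cache entry at once. Below is a minimal sketch of that pattern with oslo.concurrency, assuming the library is installed; fetch_image_if_missing is a placeholder here, not the Nova method of the same name.

from oslo_concurrency import lockutils

def fetch_image_if_missing(image_id):
    # Placeholder for the datastore search / directory creation / disk copy
    # that the log shows happening while the lock is held.
    print("checking devstack-image-cache_base for %s" % image_id)

image_id = "a4e69d6f-1c15-4f57-92a8-5e81c6be8172"  # image UUID from the log
cache_lock = "[datastore1] devstack-image-cache_base/%s" % image_id

# lockutils.lock() is a context manager; entry and exit correspond to the
# "Acquired lock" / "Releasing lock" lines in the log.
with lockutils.lock(cache_lock):
    fetch_image_if_missing(image_id)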
[ 1112.329674] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.476742] env[69982]: DEBUG nova.scheduler.client.report [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.592820] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1112.610316] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527ce5c1-cc3c-a024-7811-81deee37bf09, 'name': SearchDatastore_Task, 'duration_secs': 0.010329} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.613262] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d15a84b7-8e24-4919-8e08-121ad7a713e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.619443] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1112.619672] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.619828] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1112.620015] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.620163] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1112.620310] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1112.620519] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1112.620697] env[69982]: DEBUG nova.virt.hardware [None 
req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1112.620873] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1112.621082] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1112.621287] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1112.622183] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da27d48c-6427-4091-af65-87c52b9abd8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.626759] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1112.626759] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521ea41c-cb45-dee9-2041-90285268c87f" [ 1112.626759] env[69982]: _type = "Task" [ 1112.626759] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.634064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49b1320c-49e5-419b-ad63-fcaa4442abde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.641469] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521ea41c-cb45-dee9-2041-90285268c87f, 'name': SearchDatastore_Task, 'duration_secs': 0.011731} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.642148] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.642451] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 92f898e2-0dfd-45ed-b74b-958f6c5af844/92f898e2-0dfd-45ed-b74b-958f6c5af844.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.642738] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd7add62-0606-43d4-beef-43204b8c15d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.653754] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5118b56-72bd-4c4b-8617-7f5ce1c8433a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.658826] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1112.658826] env[69982]: value = "task-3865314" [ 1112.658826] env[69982]: _type = "Task" [ 1112.658826] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.666932] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678f493a-5dc9-4b94-9734-9ba0cd95a8da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.681951] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865314, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.703428] env[69982]: DEBUG nova.compute.manager [req-45276f1b-37db-45bd-9d69-46d535a0144c req-e46a61b8-1309-46dc-ba03-205fb0aa1342 service nova] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Detach interface failed, port_id=ee39c037-aaca-4a17-aa77-f50203a74e94, reason: Instance 9c0d0f4f-9e88-4e67-99d9-d957652587cd could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1112.740615] env[69982]: DEBUG oslo_concurrency.lockutils [None req-747b09e0-dacb-4af8-8414-45539d240c0d tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.835s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.983349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.984018] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1112.987448] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.506s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.987783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.989899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.657s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.011413] env[69982]: INFO nova.scheduler.client.report [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted allocations for instance d21659fd-015d-4f5b-b4b5-f38f550e0f00 [ 1113.170454] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865314, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492397} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.170829] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 92f898e2-0dfd-45ed-b74b-958f6c5af844/92f898e2-0dfd-45ed-b74b-958f6c5af844.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.170959] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.171247] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6cc49fc-7df5-4470-8309-0e4803e814ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.180024] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1113.180024] env[69982]: value = "task-3865315" [ 1113.180024] env[69982]: _type = "Task" [ 1113.180024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.190815] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.200705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.200966] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.327652] env[69982]: DEBUG nova.compute.manager [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Received event network-vif-plugged-bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.327876] env[69982]: DEBUG oslo_concurrency.lockutils [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] Acquiring lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.328242] env[69982]: DEBUG oslo_concurrency.lockutils [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.328327] env[69982]: DEBUG oslo_concurrency.lockutils [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.328458] env[69982]: DEBUG nova.compute.manager [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] No waiting events found dispatching network-vif-plugged-bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1113.328618] env[69982]: WARNING nova.compute.manager [req-d601ec80-37b8-4113-8a06-fe57d1261dc5 req-be2131d7-ecde-4b9b-8275-0bfb5657727c service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Received unexpected event network-vif-plugged-bef3a859-8f07-4807-8007-407dd8247fe3 for instance with vm_state building and task_state spawning. 
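The "Waiting for the task ... to complete", "progress is 0%." and "completed successfully." lines above are one task-polling cycle against vCenter. The loop below is a conceptual sketch of that cycle, not the oslo.vmware implementation; get_task_info and the 0.5 s interval are illustrative stand-ins for the PropertyCollector round trip and the library's poll interval.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    # Poll the task until vCenter reports a terminal state, mirroring the
    # wait_for_task / _poll_task pairs in the log.
    while True:
        info = get_task_info(task_ref)   # e.g. {"state": "running", "progress": 0}
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError("task %s failed" % task_ref)
        time.sleep(poll_interval)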
[ 1113.488727] env[69982]: DEBUG nova.compute.utils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1113.490771] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1113.490953] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.496200] env[69982]: INFO nova.compute.claims [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1113.521033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-51d5dc1b-f719-41d6-b1ee-150de593b159 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "d21659fd-015d-4f5b-b4b5-f38f550e0f00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.655s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.539668] env[69982]: DEBUG nova.policy [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbe1397ab59c4dacbfd8418f1b2eaa70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf2351d172d94606a82ec7e5eabc6faa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1113.690694] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075343} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.691377] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.692214] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146c0ca1-81af-4e19-87f7-0959880e1421 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.707756] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1113.719062] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 92f898e2-0dfd-45ed-b74b-958f6c5af844/92f898e2-0dfd-45ed-b74b-958f6c5af844.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.719648] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-877bcf8d-adf4-4659-8bce-ea15d383fd6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.740765] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1113.740765] env[69982]: value = "task-3865316" [ 1113.740765] env[69982]: _type = "Task" [ 1113.740765] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.749864] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865316, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.796032] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Successfully updated port: bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1113.815854] env[69982]: DEBUG nova.compute.manager [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Received event network-changed-bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.815854] env[69982]: DEBUG nova.compute.manager [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Refreshing instance network info cache due to event network-changed-bef3a859-8f07-4807-8007-407dd8247fe3. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.816977] env[69982]: DEBUG oslo_concurrency.lockutils [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] Acquiring lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.816977] env[69982]: DEBUG oslo_concurrency.lockutils [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] Acquired lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.816977] env[69982]: DEBUG nova.network.neutron [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Refreshing network info cache for port bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1113.852927] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Successfully created port: 332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.994909] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1114.002065] env[69982]: INFO nova.compute.resource_tracker [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating resource usage from migration 8f21cc6b-0f54-4ba0-b9f2-3dc19533f868 [ 1114.238361] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.252738] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865316, 'name': ReconfigVM_Task, 'duration_secs': 0.316668} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.255832] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 92f898e2-0dfd-45ed-b74b-958f6c5af844/92f898e2-0dfd-45ed-b74b-958f6c5af844.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.256915] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75c66d40-d739-40f1-8a32-c23517b3d6ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.266054] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1114.266054] env[69982]: value = "task-3865317" [ 1114.266054] env[69982]: _type = "Task" [ 1114.266054] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.280387] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865317, 'name': Rename_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.303160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.311544] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97d8214-ee18-47e0-9b3f-58f79fd56d5f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.322279] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc6926f-d246-4619-bc32-6582b76952fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.355969] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f4b422-7ba1-4ed2-b7f8-7ceeb761bd0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.364402] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f893b279-0922-458a-a001-de6beba137fc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.371412] env[69982]: DEBUG nova.network.neutron [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1114.383646] env[69982]: DEBUG nova.compute.provider_tree [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.479216] env[69982]: DEBUG nova.network.neutron [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.781513] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865317, 'name': Rename_Task, 'duration_secs': 0.158547} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.781818] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1114.782091] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c09f9f4-046b-4c48-b9e2-99fc30e8e13b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.789013] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1114.789013] env[69982]: value = "task-3865318" [ 1114.789013] env[69982]: _type = "Task" [ 1114.789013] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.797161] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.887442] env[69982]: DEBUG nova.scheduler.client.report [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.982816] env[69982]: DEBUG oslo_concurrency.lockutils [req-b68d9027-c18e-4014-a951-1cfc96f38998 req-d347e289-d148-4769-aa37-a409723dbecd service nova] Releasing lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.983209] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.983369] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1115.004745] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 
tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1115.032958] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1115.033246] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1115.033415] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1115.033633] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1115.033793] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1115.033952] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1115.034185] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1115.034345] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1115.034530] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1115.034708] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1115.035084] env[69982]: DEBUG nova.virt.hardware [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1115.036020] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e366e80-d880-4321-b4fe-8aeee1e01e80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.044881] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2619b13-b333-468f-a0a1-fae966358f2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.105892] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.106423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.298850] env[69982]: DEBUG oslo_vmware.api [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865318, 'name': PowerOnVM_Task, 'duration_secs': 0.470273} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.299187] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.299409] env[69982]: INFO nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 6.97 seconds to spawn the instance on the hypervisor. [ 1115.299607] env[69982]: DEBUG nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.300442] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3775e2f2-5680-4ab5-a451-24360ec342b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.362189] env[69982]: DEBUG nova.compute.manager [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Received event network-vif-plugged-332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.362189] env[69982]: DEBUG oslo_concurrency.lockutils [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] Acquiring lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.362189] env[69982]: DEBUG oslo_concurrency.lockutils [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.362189] env[69982]: DEBUG oslo_concurrency.lockutils [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.362189] env[69982]: DEBUG nova.compute.manager [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] No waiting events found dispatching network-vif-plugged-332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.362427] env[69982]: WARNING nova.compute.manager [req-a738055a-a398-41f4-8bb7-84e2c9be49f0 req-438375ec-fc2a-4ff9-a073-7cdfb86aae5b service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Received 
unexpected event network-vif-plugged-332199ea-71d3-4b36-8f64-bdad19027689 for instance with vm_state building and task_state spawning. [ 1115.394023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.404s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.394391] env[69982]: INFO nova.compute.manager [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Migrating [ 1115.401946] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.365s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.403042] env[69982]: DEBUG nova.objects.instance [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'pci_requests' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.461852] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Successfully updated port: 332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.530784] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1115.609554] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1115.712915] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Updating instance_info_cache with network_info: [{"id": "bef3a859-8f07-4807-8007-407dd8247fe3", "address": "fa:16:3e:5b:01:17", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef3a859-8f", "ovs_interfaceid": "bef3a859-8f07-4807-8007-407dd8247fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1115.819867] env[69982]: INFO nova.compute.manager [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 29.52 seconds to build instance. 
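The INFO lines above report per-instance timings ("Took 6.97 seconds to spawn the instance on the hypervisor", "Took 29.52 seconds to build instance"); the gap between the two is scheduling, claim, image-cache, and network setup time. A hypothetical helper for pulling those figures out of a log formatted like this excerpt (the regexes assume this exact phrasing):

```python
# Hypothetical helper for summarising the build/spawn timings reported above;
# the regexes assume the exact phrasing shown in this log.
import re
from collections import defaultdict

SPAWN_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\] Took "
                      r"(?P<secs>[\d.]+) seconds to spawn the instance")
BUILD_RE = re.compile(r"\[instance: (?P<uuid>[0-9a-f-]{36})\] Took "
                      r"(?P<secs>[\d.]+) seconds to build instance")


def summarize(lines):
    """Return {instance_uuid: {'spawn': secs, 'build': secs}}."""
    timings = defaultdict(dict)
    for line in lines:
        for key, pattern in (("spawn", SPAWN_RE), ("build", BUILD_RE)):
            m = pattern.search(line)
            if m:
                timings[m.group("uuid")][key] = float(m.group("secs"))
    return timings


if __name__ == "__main__":
    sample = [
        "[instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 6.97 seconds "
        "to spawn the instance on the hypervisor.",
        "[instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 29.52 seconds "
        "to build instance.",
    ]
    for uuid, t in summarize(sample).items():
        overhead = t["build"] - t["spawn"]
        print(f"{uuid}: build {t['build']}s, spawn {t['spawn']}s, "
              f"pre-spawn overhead ~{overhead:.2f}s")
```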
[ 1115.911537] env[69982]: DEBUG nova.objects.instance [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'numa_topology' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.914744] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.914992] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.915187] env[69982]: DEBUG nova.network.neutron [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1115.964559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.964751] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.964893] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1116.132384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.215465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "refresh_cache-d10aaf26-7100-4313-bd57-d2cfefb16e3f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.215759] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 
tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance network_info: |[{"id": "bef3a859-8f07-4807-8007-407dd8247fe3", "address": "fa:16:3e:5b:01:17", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbef3a859-8f", "ovs_interfaceid": "bef3a859-8f07-4807-8007-407dd8247fe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1116.216244] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:01:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bef3a859-8f07-4807-8007-407dd8247fe3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1116.224957] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.225292] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1116.225565] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9f08cf10-7ab3-4e25-8e96-7d19a440379a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.248018] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1116.248018] env[69982]: value = "task-3865319" [ 1116.248018] env[69982]: _type = "Task" [ 1116.248018] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.257182] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865319, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.323273] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf56e3d8-7fce-4423-a858-a0787bde509d tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.034s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.415144] env[69982]: INFO nova.compute.claims [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1116.501784] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1116.648528] env[69982]: DEBUG nova.network.neutron [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Updating instance_info_cache with network_info: [{"id": "332199ea-71d3-4b36-8f64-bdad19027689", "address": "fa:16:3e:ba:2f:2e", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332199ea-71", "ovs_interfaceid": "332199ea-71d3-4b36-8f64-bdad19027689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.740798] env[69982]: DEBUG nova.network.neutron [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.760062] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865319, 'name': CreateVM_Task, 'duration_secs': 0.319223} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.760243] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1116.761029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.761201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.761768] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1116.761850] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cbc82fa-e910-4330-ab0b-903eec6c0bc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.766808] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1116.766808] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b8133c-c10c-19f4-708b-e92eeecd2cfc" [ 1116.766808] env[69982]: _type = "Task" [ 1116.766808] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.775474] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b8133c-c10c-19f4-708b-e92eeecd2cfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.152362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.152679] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Instance network_info: |[{"id": "332199ea-71d3-4b36-8f64-bdad19027689", "address": "fa:16:3e:ba:2f:2e", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332199ea-71", "ovs_interfaceid": "332199ea-71d3-4b36-8f64-bdad19027689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.153160] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:2f:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bc9714ff-7109-4ea1-9435-b2b3fbdb9e81', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '332199ea-71d3-4b36-8f64-bdad19027689', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.161719] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.161981] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.162246] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc61694b-d896-419e-8fdc-c39476e99ea8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.183674] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.183674] env[69982]: value = "task-3865321" [ 1117.183674] env[69982]: _type = "Task" [ 1117.183674] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.193835] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865321, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.244221] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.281322] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b8133c-c10c-19f4-708b-e92eeecd2cfc, 'name': SearchDatastore_Task, 'duration_secs': 0.011808} completed successfully. 
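The CreateVM_Task and SearchDatastore_Task entries throughout this section follow one rhythm: submit the call, get back a task handle, then poll it ("progress is 0%") until "completed successfully", at which point a duration_secs is reported. The real loop is oslo.vmware's wait_for_task/_poll_task referenced in the log paths; the following is only a generic stand-in that shows the same shape with a fake poll function:

```python
# Illustrative polling loop in the spirit of the wait_for_task/_poll_task
# entries above; the poll function and interval are stand-ins, not oslo.vmware.
import time
import logging

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("task-poll")


def wait_for_task(poll_fn, task_id, interval=0.5, timeout=300):
    """Poll `poll_fn(task_id)` until it reports success or error.

    `poll_fn` is expected to return a dict with 'state' in
    {'running', 'success', 'error'} and an integer 'progress'.
    """
    start = time.monotonic()
    while True:
        status = poll_fn(task_id)
        elapsed = time.monotonic() - start
        if status["state"] == "success":
            LOG.debug("Task %s completed successfully (duration_secs=%.3f)",
                      task_id, elapsed)
            return status
        if status["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {status.get('error')}")
        if elapsed > timeout:
            raise TimeoutError(f"Task {task_id} still running after {timeout}s")
        LOG.debug("Task %s progress is %d%%", task_id, status["progress"])
        time.sleep(interval)


if __name__ == "__main__":
    # Fake task that finishes on the third poll.
    calls = {"n": 0}

    def fake_poll(task_id):
        calls["n"] += 1
        if calls["n"] < 3:
            return {"state": "running", "progress": (calls["n"] - 1) * 50}
        return {"state": "success", "progress": 100}

    wait_for_task(fake_poll, "task-3865319", interval=0.1)
```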
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.281708] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.282014] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1117.282309] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.282518] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.282806] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1117.283060] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-150c95ae-ae55-47bd-a510-f3f4c0a8131e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.294556] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1117.295050] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1117.295901] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54842d9a-e6b5-4d32-a2be-bfd1dac1f85b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.303845] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.303845] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a224e3-feab-64ec-89c1-8d2391507ece" [ 1117.303845] env[69982]: _type = "Task" [ 1117.303845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.312977] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a224e3-feab-64ec-89c1-8d2391507ece, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.394725] env[69982]: DEBUG nova.compute.manager [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Received event network-changed-332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.394984] env[69982]: DEBUG nova.compute.manager [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Refreshing instance network info cache due to event network-changed-332199ea-71d3-4b36-8f64-bdad19027689. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.395251] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Acquiring lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.395447] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Acquired lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.395654] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Refreshing network info cache for port 332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1117.680644] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588486b0-3bc1-43b7-a12c-77c20add4f34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.690783] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d79560b-2870-451e-9204-5b9e52d62dec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.696680] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865321, 'name': CreateVM_Task, 'duration_secs': 0.347695} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.697216] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.697949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.698134] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.698457] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1117.699033] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78cb9198-34f8-48f1-995a-66376b7979cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.726089] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76b3019-3afa-4717-a2bd-211e1bf6fd6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.729856] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.729856] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e6dc3-336d-b52e-ed4f-369f72b105d5" [ 1117.729856] env[69982]: _type = "Task" [ 1117.729856] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.737226] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfe0fa9-8f67-4421-87a7-f81dbe63ae8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.746398] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525e6dc3-336d-b52e-ed4f-369f72b105d5, 'name': SearchDatastore_Task, 'duration_secs': 0.010982} completed successfully. 
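The "Processing image a4e69d6f-..." sequences around the image cache follow a check-then-fetch pattern: take the per-image lock, search the datastore for the cached VMDK under devstack-image-cache_base, and only create/fetch it when the search comes back empty, so concurrent builds of the same image do the work once. A simplified local-filesystem sketch of that idea; the paths and the fetch step are hypothetical, not the vmwareapi driver's code:

```python
# Simplified "fetch image if missing" sketch using the local filesystem in
# place of a vSphere datastore; the lock-then-check-then-fetch shape mirrors
# the log above, but paths and the fetch step are hypothetical.
import os
import threading

_image_locks = {}
_image_locks_guard = threading.Lock()

CACHE_ROOT = "/tmp/devstack-image-cache_base"   # stand-in for "[datastoreN] ..."


def _lock_for(image_id):
    with _image_locks_guard:
        return _image_locks.setdefault(image_id, threading.Lock())


def fetch_image_if_missing(image_id, fetch_fn):
    """Ensure <cache>/<image_id>/<image_id>.vmdk exists, fetching it at most once."""
    cached = os.path.join(CACHE_ROOT, image_id, f"{image_id}.vmdk")
    with _lock_for(image_id):                     # serialize per image id
        if os.path.exists(cached):                # SearchDatastore_Task analogue
            return cached
        os.makedirs(os.path.dirname(cached), exist_ok=True)  # MakeDirectory analogue
        fetch_fn(cached)                          # download/convert step
        return cached


if __name__ == "__main__":
    def fake_fetch(path):
        with open(path, "wb") as f:
            f.write(b"\0" * 512)                  # placeholder disk content

    p = fetch_image_if_missing("a4e69d6f-1c15-4f57-92a8-5e81c6be8172", fake_fetch)
    print("cached image at", p)
```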
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.747147] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.747427] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1117.747681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.747826] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.748030] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1117.751724] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73bb3b69-6085-4662-b76c-4e180532d9b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.762246] env[69982]: DEBUG nova.compute.provider_tree [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.771585] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1117.771804] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1117.773283] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7150b71-81c8-472e-a2b9-a28b9453ec00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.781111] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.781111] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f4e815-379e-c0ca-7de9-8fe35afb02fc" [ 1117.781111] env[69982]: _type = "Task" [ 1117.781111] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.792591] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f4e815-379e-c0ca-7de9-8fe35afb02fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010053} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.794240] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e4ec028-e021-4454-8a56-f324b2ca6749 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.801384] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.801384] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52127a5f-522b-bf17-2e4f-0b3c3ce826db" [ 1117.801384] env[69982]: _type = "Task" [ 1117.801384] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.815946] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52127a5f-522b-bf17-2e4f-0b3c3ce826db, 'name': SearchDatastore_Task, 'duration_secs': 0.011234} completed successfully. 
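Every lockutils line in this excerpt reports how long the caller waited to acquire a named lock and, on release, how long it was held (e.g. "waited 20.365s", "held 2.404s"). A small context manager that produces the same kind of figures, offered as an illustration of the pattern rather than oslo_concurrency's implementation:

```python
# Small context manager that logs acquire-wait and hold times in the style of
# the lockutils lines above (illustration only, not oslo_concurrency itself).
import threading
import time
import logging
from contextlib import contextmanager

logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
LOG = logging.getLogger("timed-lock")

_locks = {}
_guard = threading.Lock()


@contextmanager
def timed_lock(name, owner):
    with _guard:
        lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, owner, waited)
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, owner, held)


if __name__ == "__main__":
    with timed_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
        time.sleep(0.05)   # pretend to claim resources while holding the lock
```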
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.819553] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.819850] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] f8107863-4eb1-4b8e-937e-30dc1e276f33/f8107863-4eb1-4b8e-937e-30dc1e276f33.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1117.820166] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a224e3-feab-64ec-89c1-8d2391507ece, 'name': SearchDatastore_Task, 'duration_secs': 0.010443} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.820390] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5e2a5f24-0230-444d-9cff-831ad676a8f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.823024] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-727557cc-ae60-49b5-860b-1a6fa383d99c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.829202] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.829202] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52687bb3-43ab-0f1d-b4a0-9d899741c960" [ 1117.829202] env[69982]: _type = "Task" [ 1117.829202] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.834430] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1117.834430] env[69982]: value = "task-3865322" [ 1117.834430] env[69982]: _type = "Task" [ 1117.834430] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.841895] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52687bb3-43ab-0f1d-b4a0-9d899741c960, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.847779] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.116344] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Updated VIF entry in instance network info cache for port 332199ea-71d3-4b36-8f64-bdad19027689. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1118.116836] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Updating instance_info_cache with network_info: [{"id": "332199ea-71d3-4b36-8f64-bdad19027689", "address": "fa:16:3e:ba:2f:2e", "network": {"id": "ed7c78ca-28cc-475c-8b5a-215f43cb02b4", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1540612227-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf2351d172d94606a82ec7e5eabc6faa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bc9714ff-7109-4ea1-9435-b2b3fbdb9e81", "external-id": "nsx-vlan-transportzone-887", "segmentation_id": 887, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap332199ea-71", "ovs_interfaceid": "332199ea-71d3-4b36-8f64-bdad19027689", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.266514] env[69982]: DEBUG nova.scheduler.client.report [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.339780] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52687bb3-43ab-0f1d-b4a0-9d899741c960, 'name': SearchDatastore_Task, 'duration_secs': 0.011789} completed successfully. 
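The report-client line above concludes "Inventory has not changed for provider ... based on inventory data: {...}", i.e. the freshly computed inventory dict matched the one already cached for the provider, so no Placement update is needed. A hedged sketch of that comparison over the resource classes shown in the log (the update call is a stand-in):

```python
# Sketch of an "update inventory only if it changed" check over the resource
# classes shown in the log line above; the dict layout matches that line, the
# update_placement() call is a stand-in.
CURRENT = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def inventory_changed(old, new):
    """True if any resource class was added, removed, or had a field change."""
    if old.keys() != new.keys():
        return True
    return any(old[rc] != new[rc] for rc in old)


def maybe_update(provider_uuid, old, new, update_placement):
    if not inventory_changed(old, new):
        print(f"Inventory has not changed for provider {provider_uuid}")
        return False
    update_placement(provider_uuid, new)
    return True


if __name__ == "__main__":
    fresh = {rc: dict(fields) for rc, fields in CURRENT.items()}
    maybe_update("206a5498-2e79-46c1-a636-9488a05fb67d", CURRENT, fresh,
                 update_placement=lambda uuid, inv: None)
```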
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.343095] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.343382] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d10aaf26-7100-4313-bd57-d2cfefb16e3f/d10aaf26-7100-4313-bd57-d2cfefb16e3f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.343670] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e432c70f-0968-4e0d-b1cc-087e122f57ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.351352] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865322, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.352832] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1118.352832] env[69982]: value = "task-3865323" [ 1118.352832] env[69982]: _type = "Task" [ 1118.352832] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.361034] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.621094] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Releasing lock "refresh_cache-f8107863-4eb1-4b8e-937e-30dc1e276f33" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.621094] env[69982]: DEBUG nova.compute.manager [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Received event network-changed-9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1118.621094] env[69982]: DEBUG nova.compute.manager [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Refreshing instance network info cache due to event network-changed-9292dad6-1ffe-4506-ba58-fb92f9a98323. 
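The CopyVirtualDisk_Task lines above follow a fixed path convention: the cached image lives at "[datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk" and is copied to "[datastore] <instance-uuid>/<instance-uuid>.vmdk", after which the root disk is extended (the "Extending root virtual disk to 1048576" figure appears to be in KB, i.e. a 1 GiB root disk, though the log does not state the unit). A small helper that just reproduces those path strings; the function names are illustrative:

```python
# Helper that reproduces the datastore path convention visible in the
# CopyVirtualDisk_Task lines above; function names are illustrative.
def cached_image_path(datastore, image_id, cache_dir="devstack-image-cache_base"):
    """'[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk'"""
    return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"


def instance_disk_path(datastore, instance_uuid):
    """'[datastore1] <instance-uuid>/<instance-uuid>.vmdk'"""
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"


if __name__ == "__main__":
    image = "a4e69d6f-1c15-4f57-92a8-5e81c6be8172"
    inst = "d10aaf26-7100-4313-bd57-d2cfefb16e3f"
    src = cached_image_path("datastore1", image)
    dst = instance_disk_path("datastore1", inst)
    print(f"Copying Virtual Disk {src} to {dst}")
    # After the copy completes, the root disk is grown; assuming the figure in
    # the log is KB, 1048576 KB corresponds to a 1 GiB root disk.
    print("Extending root virtual disk to", 1 * 1024 * 1024)
```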
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1118.621584] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Acquiring lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.621584] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Acquired lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.621584] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Refreshing network info cache for port 9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1118.775456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.373s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.779866] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.072s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.780146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.780382] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1118.780884] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.786s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.781230] env[69982]: DEBUG nova.objects.instance [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'resources' on Instance uuid 5bbc7b58-3e8e-495f-911a-072d282e48a9 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.783874] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58b2064-3609-4cf5-95a1-e86a7fa32a9f {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.790076] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94131e5-891e-4672-b9b4-e38d08d5ff41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.822781] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b889ae9-7a21-40e4-8fbe-6504a431354b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.827564] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1118.843622] env[69982]: INFO nova.network.neutron [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating port 1ded08ab-b715-4b57-81f3-69d6383c5a74 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1118.849225] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ec9994-7dc8-41dd-acf8-bfadeceda775 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.862024] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522247} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.863520] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a3c73c-4082-45a8-906f-2240c4e27c35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.866981] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] f8107863-4eb1-4b8e-937e-30dc1e276f33/f8107863-4eb1-4b8e-937e-30dc1e276f33.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1118.867373] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.870536] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-656fba43-824f-4da7-b548-81566f07ad12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.874273] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475502} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.874273] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] d10aaf26-7100-4313-bd57-d2cfefb16e3f/d10aaf26-7100-4313-bd57-d2cfefb16e3f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1118.874273] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.875555] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7cf1510-b633-4b4a-b76d-5a282d0098dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.905490] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178736MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1118.905490] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.907524] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1118.907524] env[69982]: value = "task-3865324" [ 1118.907524] env[69982]: _type = "Task" [ 1118.907524] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.913160] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1118.913160] env[69982]: value = "task-3865325" [ 1118.913160] env[69982]: _type = "Task" [ 1118.913160] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.920899] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865324, 'name': ExtendVirtualDisk_Task} progress is 0%. 
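The resource-tracker audit above reports the raw hypervisor view (free_ram=178736MB, free_disk=45GB, free_vcpus=48); what the scheduler can actually hand out is that inventory stretched by the allocation ratios shown earlier (VCPU 4.0, memory and disk 1.0), using the standard Placement capacity rule (total - reserved) * allocation_ratio. A worked sketch of that arithmetic with the figures from this log; the formula is my restatement of the Placement rule, not quoted from this log:

```python
# Worked example of the schedulable-capacity arithmetic implied by the
# inventory and resource-view lines above (figures copied from this log).
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}


def schedulable(rc):
    """Capacity Placement will hand out: (total - reserved) * allocation_ratio."""
    f = INVENTORY[rc]
    return (f["total"] - f["reserved"]) * f["allocation_ratio"]


if __name__ == "__main__":
    for rc in INVENTORY:
        print(f"{rc}: {schedulable(rc):g} units schedulable")
    # e.g. VCPU: (48 - 0) * 4.0 = 192 vCPUs can be allocated even though the
    # node reports only 48 free physical vCPUs.
```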
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.928487] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865325, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.334368] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1119.334661] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21db86c9-a334-47bb-8484-ffb079e4e2db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.344711] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1119.344711] env[69982]: value = "task-3865326" [ 1119.344711] env[69982]: _type = "Task" [ 1119.344711] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.368222] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865326, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.368222] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updated VIF entry in instance network info cache for port 9292dad6-1ffe-4506-ba58-fb92f9a98323. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1119.368443] env[69982]: DEBUG nova.network.neutron [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updating instance_info_cache with network_info: [{"id": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "address": "fa:16:3e:7c:ea:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9292dad6-1f", "ovs_interfaceid": "9292dad6-1ffe-4506-ba58-fb92f9a98323", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.422477] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165418} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.423195] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1119.424093] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11816c9a-92d4-45a9-a187-58ac58a986a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.432387] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068325} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.433382] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1119.434331] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dee30c-fa9e-4867-99e8-837183945fb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.457661] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] f8107863-4eb1-4b8e-937e-30dc1e276f33/f8107863-4eb1-4b8e-937e-30dc1e276f33.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.461736] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-256d459a-5bbc-4510-a0d2-c848985297dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.497367] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] d10aaf26-7100-4313-bd57-d2cfefb16e3f/d10aaf26-7100-4313-bd57-d2cfefb16e3f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.501537] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd425bd0-532f-4a3d-b215-1cfa9c87ba75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.518985] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1119.518985] env[69982]: value = "task-3865327" [ 1119.518985] env[69982]: _type = "Task" [ 1119.518985] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.524887] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1119.524887] env[69982]: value = "task-3865328" [ 1119.524887] env[69982]: _type = "Task" [ 1119.524887] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.535441] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865327, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.545508] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865328, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.722666] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d4c6a6-e835-4066-8e59-dc8edb8da31a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.733539] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e99459-3f68-452e-b974-ab5d2f53b46b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.589113] env[69982]: DEBUG oslo_concurrency.lockutils [req-569675e0-a407-404f-8c6f-677ea25bcb7e req-38be8887-fc62-4c29-8a06-a93f81552cc0 service nova] Releasing lock "refresh_cache-92f898e2-0dfd-45ed-b74b-958f6c5af844" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.591038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1120.591149] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.591247] env[69982]: DEBUG nova.network.neutron [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1120.603179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee20d888-79fd-4dc6-b865-8e46b2f4a577 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.608022] env[69982]: DEBUG nova.compute.manager [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1120.608022] env[69982]: DEBUG oslo_concurrency.lockutils [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.608022] env[69982]: DEBUG 
oslo_concurrency.lockutils [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.608022] env[69982]: DEBUG oslo_concurrency.lockutils [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.608022] env[69982]: DEBUG nova.compute.manager [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] No waiting events found dispatching network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1120.608235] env[69982]: WARNING nova.compute.manager [req-994067df-827b-44ab-8fdf-1932bd0da26d req-785cb230-c35b-4f5e-93d1-ea85c3f46160 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received unexpected event network-vif-plugged-1ded08ab-b715-4b57-81f3-69d6383c5a74 for instance with vm_state shelved_offloaded and task_state spawning. [ 1120.614951] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865327, 'name': ReconfigVM_Task, 'duration_secs': 0.400352} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.621789] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Reconfigured VM instance instance-00000064 to attach disk [datastore2] f8107863-4eb1-4b8e-937e-30dc1e276f33/f8107863-4eb1-4b8e-937e-30dc1e276f33.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.622719] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865326, 'name': PowerOffVM_Task, 'duration_secs': 0.357195} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.622924] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865328, 'name': ReconfigVM_Task, 'duration_secs': 0.50755} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.623125] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18b035a5-3b8f-4262-ae1c-d9df7f9f0eea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.625666] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f047a9b3-15f0-40f8-bd91-d0449e6204e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.629633] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1120.629824] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1120.633762] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Reconfigured VM instance instance-00000063 to attach disk [datastore1] d10aaf26-7100-4313-bd57-d2cfefb16e3f/d10aaf26-7100-4313-bd57-d2cfefb16e3f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.637613] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c4007c0-3953-4460-8698-5c409b4088c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.649107] env[69982]: DEBUG nova.compute.provider_tree [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1120.655529] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1120.655529] env[69982]: value = "task-3865331" [ 1120.655529] env[69982]: _type = "Task" [ 1120.655529] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.655529] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1120.655529] env[69982]: value = "task-3865330" [ 1120.655529] env[69982]: _type = "Task" [ 1120.655529] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.668210] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865330, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.670750] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865331, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.136194] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1121.136598] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1121.136778] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1121.136978] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1121.137193] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1121.137386] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1121.137667] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1121.137871] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1121.138135] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1121.138333] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1121.138532] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1121.144376] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9566dd46-cf5d-44dd-b766-7470ec313c1f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.156097] env[69982]: DEBUG nova.scheduler.client.report [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1121.174297] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1121.174297] env[69982]: value = "task-3865332" [ 1121.174297] env[69982]: _type = "Task" [ 1121.174297] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.181512] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865330, 'name': Rename_Task, 'duration_secs': 0.154386} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.181762] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865331, 'name': Rename_Task, 'duration_secs': 0.337683} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.185642] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1121.185934] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1121.186812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56c0a2f3-b91c-4777-ade1-8fb86afb2f49 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.188271] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9a2e30b-df56-4ffa-90e1-e16e92a90982 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.197595] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865332, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.200586] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1121.200586] env[69982]: value = "task-3865333" [ 1121.200586] env[69982]: _type = "Task" [ 1121.200586] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.200940] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1121.200940] env[69982]: value = "task-3865334" [ 1121.200940] env[69982]: _type = "Task" [ 1121.200940] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.214115] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865334, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.217370] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865333, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.338325] env[69982]: DEBUG nova.network.neutron [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1121.670786] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.890s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.673638] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.860s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.673916] env[69982]: DEBUG nova.objects.instance [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lazy-loading 'resources' on Instance uuid cd839916-6daf-4b31-941d-6305a585bfaa {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1121.692999] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865332, 'name': ReconfigVM_Task, 'duration_secs': 0.203445} 
completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.693102] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1121.708740] env[69982]: INFO nova.scheduler.client.report [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted allocations for instance 5bbc7b58-3e8e-495f-911a-072d282e48a9 [ 1121.719904] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865334, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.720181] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865333, 'name': PowerOnVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.841433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.881236] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='06552491a7f2b41a9f0a7e8f8e25cc07',container_format='bare',created_at=2025-05-07T07:12:05Z,direct_url=,disk_format='vmdk',id=07979a20-baf7-482a-918a-853610d09226,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-960821018-shelved',owner='251f58d95d51416d9d9fd54aa14546e2',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2025-05-07T07:12:21Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1121.881675] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1121.882025] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image limits 0:0:0 
{{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1121.882379] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1121.882689] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1121.882895] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1121.883234] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1121.883469] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1121.883731] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1121.884030] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1121.884316] env[69982]: DEBUG nova.virt.hardware [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1121.885653] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380ad1b4-a522-4e84-9ad4-805696198247 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.898021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0bb100-c9ea-49ae-9aa1-7cc077ec75aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.913501] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 
tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:93:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3cc0a33d-17c0-4b87-b48f-413a87a4cc6a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ded08ab-b715-4b57-81f3-69d6383c5a74', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1121.922331] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1121.922693] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1121.923058] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00d49468-8986-40c0-84f0-16b7500587ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.945637] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1121.945637] env[69982]: value = "task-3865335" [ 1121.945637] env[69982]: _type = "Task" [ 1121.945637] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.956961] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865335, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.200683] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1122.200991] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1122.201150] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1122.201329] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1122.201472] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1122.201640] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1122.201819] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1122.202036] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1122.202188] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies 
{{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1122.202347] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1122.202516] env[69982]: DEBUG nova.virt.hardware [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1122.208363] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfiguring VM instance instance-00000026 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1122.208861] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f8e304b-18e0-4bb5-94cd-f226bff9c7e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.233851] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a2977be0-5fda-4f29-9500-495fc2b544e0 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "5bbc7b58-3e8e-495f-911a-072d282e48a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.025s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.240956] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865333, 'name': PowerOnVM_Task, 'duration_secs': 0.806201} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.244919] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1122.245270] env[69982]: INFO nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Took 9.65 seconds to spawn the instance on the hypervisor. 
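[editor's note] The PowerOnVM_Task / wait_for_task cycle recorded above (request_handler at service.py:371, wait_for_task at api.py:397, _poll_task at api.py:434 and 444) is oslo.vmware's standard task-polling pattern: the driver submits an asynchronous vCenter task and then blocks on the session's wait_for_task(), which polls the task object and emits the "progress is N%" and "completed successfully" DEBUG entries seen throughout this log. The following is a minimal standalone sketch of that pattern, not the Nova code path itself (Nova goes through nova.virt.vmwareapi.vm_util and its session wrapper); the vCenter host, credentials, poll interval, and the 'vm-123' moref value are placeholders and are not taken from this log.

    # Minimal sketch of the oslo.vmware task-polling pattern; all connection
    # details and the moref value below are placeholders, not from this log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vc.example.org',                  # placeholder vCenter host
        'administrator@vsphere.local',     # placeholder username
        'secret',                          # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)            # drives the "progress is N%" polling

    # Build a managed object reference for a VM (placeholder moref 'vm-123').
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Submit the asynchronous vCenter task; this is what produces the
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-..." lines.
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Block until the task finishes; oslo.vmware polls the task object and logs
    # the "Task: {...} progress is N%" and "completed successfully" entries.
    task_info = session.wait_for_task(task_ref)
    print(task_info.state)

Run against a reachable vCenter, a script along these lines would be expected to emit the same Invoking / progress / completed DEBUG sequence as the entries above. [end editor's note]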
[ 1122.245412] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1122.245751] env[69982]: DEBUG oslo_vmware.api [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865334, 'name': PowerOnVM_Task, 'duration_secs': 0.721911} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.246015] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1122.246015] env[69982]: value = "task-3865337" [ 1122.246015] env[69982]: _type = "Task" [ 1122.246015] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.249290] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c54d7241-53a8-4dc7-a143-da4ec70d6863 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.251917] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1122.252130] env[69982]: INFO nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Took 7.25 seconds to spawn the instance on the hypervisor. [ 1122.252314] env[69982]: DEBUG nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1122.253966] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c99a026c-0c2a-4ec7-98cf-8bec00b24db7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.277339] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865337, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.285763] env[69982]: DEBUG nova.compute.manager [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1122.285953] env[69982]: DEBUG nova.compute.manager [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing instance network info cache due to event network-changed-1ded08ab-b715-4b57-81f3-69d6383c5a74. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1122.286181] env[69982]: DEBUG oslo_concurrency.lockutils [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.286317] env[69982]: DEBUG oslo_concurrency.lockutils [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.286467] env[69982]: DEBUG nova.network.neutron [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Refreshing network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1122.456593] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865335, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.521045] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4c9cd2a-7106-4b64-b17f-de621adb7022 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.531689] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfae7d5-7233-4aad-bd05-8217c0e95a81 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.567459] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264f7333-7286-4348-93d8-7d1101778100 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.576076] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ffb1e4-9dfd-4b62-950d-990bd5acc1c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.590835] env[69982]: DEBUG nova.compute.provider_tree [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.763051] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865337, 'name': ReconfigVM_Task, 'duration_secs': 0.325554} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.764044] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfigured VM instance instance-00000026 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1122.764193] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa25386-3601-4d85-8994-7acd83f7f5c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.795022] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1122.800641] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab433026-67f9-4c56-8144-b45e6abeeecb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.814874] env[69982]: INFO nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Took 33.28 seconds to build instance. [ 1122.816085] env[69982]: INFO nova.compute.manager [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Took 33.24 seconds to build instance. [ 1122.823908] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1122.823908] env[69982]: value = "task-3865338" [ 1122.823908] env[69982]: _type = "Task" [ 1122.823908] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.834634] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865338, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.956987] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865335, 'name': CreateVM_Task, 'duration_secs': 0.633492} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.957245] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1122.957967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1122.958154] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1122.958594] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1122.958891] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c18a3614-748f-4191-b74c-43e9d947a438 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.965290] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1122.965290] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522f2ef8-a4bf-9e69-a9e4-60fe61579c7a" [ 1122.965290] env[69982]: _type = "Task" [ 1122.965290] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.979614] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522f2ef8-a4bf-9e69-a9e4-60fe61579c7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.082578] env[69982]: DEBUG nova.network.neutron [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updated VIF entry in instance network info cache for port 1ded08ab-b715-4b57-81f3-69d6383c5a74. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1123.083055] env[69982]: DEBUG nova.network.neutron [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.094499] env[69982]: DEBUG nova.scheduler.client.report [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1123.158945] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.159485] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.318631] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.788s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.319058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-90a4d3cb-754d-4137-a525-769510cdecab tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.751s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.335369] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865338, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.476950] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.477341] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Processing image 07979a20-baf7-482a-918a-853610d09226 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1123.477416] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.477563] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.477751] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1123.478031] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4009c245-516b-41ed-a106-e15c2e738dfe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.493764] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 
tempest-ServersNegativeTestJSON-620845051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1123.493999] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1123.494829] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67bef26b-32da-4009-8f95-7d6f915cde77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.501498] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1123.501498] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525811c7-0781-e2f7-a3e6-072535566621" [ 1123.501498] env[69982]: _type = "Task" [ 1123.501498] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.510745] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525811c7-0781-e2f7-a3e6-072535566621, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.586524] env[69982]: DEBUG oslo_concurrency.lockutils [req-586571d6-e764-4d69-802f-448ad7f86aa2 req-5591e008-ae46-4628-9aea-6e8418a5123d service nova] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1123.601288] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.604071] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.119s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.605331] env[69982]: INFO nova.compute.claims [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1123.625299] env[69982]: INFO nova.scheduler.client.report [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Deleted 
allocations for instance cd839916-6daf-4b31-941d-6305a585bfaa [ 1123.663330] env[69982]: INFO nova.compute.manager [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Detaching volume d0cf9a07-1efc-4787-be9e-c02fbe499c1d [ 1123.697956] env[69982]: INFO nova.virt.block_device [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Attempting to driver detach volume d0cf9a07-1efc-4787-be9e-c02fbe499c1d from mountpoint /dev/sdb [ 1123.698214] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1123.698442] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768034', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'name': 'volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6d390a12-bfb4-4d91-9e83-a81560a08e1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'serial': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1123.699275] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764a3ae7-b410-45ee-9d8c-dc3aa28e44d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.721698] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fe73f4-61af-4add-a391-75a4731737af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.729724] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d40d3da-4895-4af8-8e84-56806f5d3ec5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.750904] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da41002b-d2a8-4be3-a037-d81d01e19654 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.768378] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] The volume has not been displaced from its original location: [datastore2] volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d/volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1123.773958] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfiguring VM instance instance-0000004b to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1123.774317] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c32d214-86dd-4753-b109-8c4d7abe95aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.794972] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1123.794972] env[69982]: value = "task-3865339" [ 1123.794972] env[69982]: _type = "Task" [ 1123.794972] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.803423] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.820579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.820968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.821238] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.821433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.821654] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.823915] env[69982]: INFO nova.compute.manager [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Terminating instance [ 1123.835381] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865338, 'name': ReconfigVM_Task, 'duration_secs': 0.542539} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.835665] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7/4597a0b8-2c04-4755-8e0d-e00e5cdaacd7.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1123.835932] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1123.886152] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.886502] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.886664] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.886928] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 
tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.887324] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.890045] env[69982]: INFO nova.compute.manager [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Terminating instance [ 1124.012334] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1124.012584] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Fetch image to [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164/OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1124.012816] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Downloading stream optimized image 07979a20-baf7-482a-918a-853610d09226 to [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164/OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164.vmdk on the data store datastore1 as vApp {{(pid=69982) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1124.013011] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Downloading image file data 07979a20-baf7-482a-918a-853610d09226 to the ESX as VM named 'OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164' {{(pid=69982) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1124.104177] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1124.104177] env[69982]: value = "resgroup-9" [ 1124.104177] env[69982]: _type = "ResourcePool" [ 1124.104177] env[69982]: }. 
{{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1124.104509] env[69982]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4e85f299-9624-4d29-afb4-eb2b975b4e39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.129737] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lease: (returnval){ [ 1124.129737] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1124.129737] env[69982]: _type = "HttpNfcLease" [ 1124.129737] env[69982]: } obtained for vApp import into resource pool (val){ [ 1124.129737] env[69982]: value = "resgroup-9" [ 1124.129737] env[69982]: _type = "ResourcePool" [ 1124.129737] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1124.130044] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the lease: (returnval){ [ 1124.130044] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1124.130044] env[69982]: _type = "HttpNfcLease" [ 1124.130044] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1124.137766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d945ac0d-2774-4676-b8b7-7012246ead25 tempest-ServerRescueNegativeTestJSON-953957381 tempest-ServerRescueNegativeTestJSON-953957381-project-member] Lock "cd839916-6daf-4b31-941d-6305a585bfaa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.788s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.145126] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1124.145126] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1124.145126] env[69982]: _type = "HttpNfcLease" [ 1124.145126] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1124.307463] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865339, 'name': ReconfigVM_Task, 'duration_secs': 0.391609} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.307875] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Reconfigured VM instance instance-0000004b to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1124.314661] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d3d13a8-f9fb-4285-869a-1cbc02134956 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.331914] env[69982]: DEBUG nova.compute.manager [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1124.332285] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.333243] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24aa2fb-3a6f-4c56-982d-b67f609cc7f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.338374] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1124.338374] env[69982]: value = "task-3865342" [ 1124.338374] env[69982]: _type = "Task" [ 1124.338374] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.346645] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.347512] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd69e021-9ff8-4ad8-8c23-014531c2b327 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.350709] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a83982d4-0a36-422b-ac50-2967d4802df4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.356251] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865342, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.378977] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95eccd3-ab58-46be-8770-7314348d6f50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.382227] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1124.382227] env[69982]: value = "task-3865343" [ 1124.382227] env[69982]: _type = "Task" [ 1124.382227] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.403232] env[69982]: DEBUG nova.compute.manager [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1124.403572] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.404041] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1124.415098] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf2f349-577f-43d6-a432-410b14772b60 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.420333] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.428144] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.428621] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8f62a4b-486b-43d7-9eec-f5065034c153 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.436809] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1124.436809] env[69982]: value = "task-3865344" [ 1124.436809] env[69982]: _type = "Task" [ 1124.436809] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.446600] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.638277] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1124.638277] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1124.638277] env[69982]: _type = "HttpNfcLease" [ 1124.638277] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1124.848925] env[69982]: DEBUG oslo_vmware.api [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865342, 'name': ReconfigVM_Task, 'duration_secs': 0.211094} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.849493] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768034', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'name': 'volume-d0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6d390a12-bfb4-4d91-9e83-a81560a08e1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d', 'serial': 'd0cf9a07-1efc-4787-be9e-c02fbe499c1d'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1124.892575] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865343, 'name': PowerOffVM_Task, 'duration_secs': 0.199272} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.892894] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.893076] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.893340] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ad31f24-c92f-4f2f-8cde-8122a8cc786e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.909201] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0cdd2fe-d300-4f25-ba80-a895a01739ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.917778] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-accb2079-a0c7-4de2-a8a2-e11c0d9920b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.960260] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dddcec6-2edb-445e-af1d-7b73a30f1c06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.967959] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865344, 'name': PowerOffVM_Task, 'duration_secs': 0.211918} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.972053] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.972053] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.972053] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.972053] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.972309] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleting the datastore file [datastore1] d10aaf26-7100-4313-bd57-d2cfefb16e3f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.972491] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab79f281-da72-4616-a225-92533be23bdf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.974198] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70555314-249a-473d-9026-b4f0cc37794b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.976798] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014a113d-4c06-471b-9c5d-1ee099248519 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.992371] env[69982]: DEBUG nova.compute.provider_tree [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.995278] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1124.995278] env[69982]: value = "task-3865346" [ 1124.995278] env[69982]: _type = "Task" [ 1124.995278] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.996279] env[69982]: DEBUG nova.network.neutron [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Port c816f035-7c9d-47ba-8b3b-29a57ec10561 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1125.008487] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.138583] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1125.138583] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1125.138583] env[69982]: _type = "HttpNfcLease" [ 1125.138583] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1125.138918] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1125.138918] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b20b07-5b28-3c8b-ad55-fa34b42e1ecf" [ 1125.138918] env[69982]: _type = "HttpNfcLease" [ 1125.138918] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1125.139676] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b03c77f-c67c-48e6-9d7c-34089f1aae90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.147946] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1125.148127] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk. 
{{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1125.211885] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.212129] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.212321] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleting the datastore file [datastore2] f8107863-4eb1-4b8e-937e-30dc1e276f33 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.212644] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0777687-4818-40f0-a0da-2eef696b4783 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.221630] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-049d1ab6-f56e-482e-bb84-50f938fd1c1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.223893] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for the task: (returnval){ [ 1125.223893] env[69982]: value = "task-3865348" [ 1125.223893] env[69982]: _type = "Task" [ 1125.223893] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.237361] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865348, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.421456] env[69982]: DEBUG nova.objects.instance [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid 6d390a12-bfb4-4d91-9e83-a81560a08e1a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.496871] env[69982]: DEBUG nova.scheduler.client.report [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.528487] env[69982]: DEBUG oslo_vmware.api [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139088} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.530561] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.530810] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.530982] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.531165] env[69982]: INFO nova.compute.manager [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1125.531416] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.531903] env[69982]: DEBUG nova.compute.manager [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1125.532035] env[69982]: DEBUG nova.network.neutron [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1125.740911] env[69982]: DEBUG oslo_vmware.api [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Task: {'id': task-3865348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147508} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.745020] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.745020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.745020] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.745020] env[69982]: INFO nova.compute.manager [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1125.745020] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.745345] env[69982]: DEBUG nova.compute.manager [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1125.745345] env[69982]: DEBUG nova.network.neutron [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1125.899197] env[69982]: DEBUG nova.compute.manager [req-f59968ad-a3c3-4d6e-970a-194fb5f838ee req-4c626d5a-1d16-4253-91e9-7bf6451d6492 service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Received event network-vif-deleted-bef3a859-8f07-4807-8007-407dd8247fe3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.899473] env[69982]: INFO nova.compute.manager [req-f59968ad-a3c3-4d6e-970a-194fb5f838ee req-4c626d5a-1d16-4253-91e9-7bf6451d6492 service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Neutron deleted interface bef3a859-8f07-4807-8007-407dd8247fe3; detaching it from the instance and deleting it from the info cache [ 1125.899692] env[69982]: DEBUG nova.network.neutron [req-f59968ad-a3c3-4d6e-970a-194fb5f838ee req-4c626d5a-1d16-4253-91e9-7bf6451d6492 service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.021765] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.022292] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1126.027432] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 24.639s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.050460] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.050724] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.050908] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.275291] env[69982]: DEBUG nova.compute.manager [req-85fff372-4637-4820-8f5d-76b984384435 req-f163fa43-ee2d-452c-86a4-1621361dce53 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Received event network-vif-deleted-332199ea-71d3-4b36-8f64-bdad19027689 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.275505] env[69982]: INFO nova.compute.manager [req-85fff372-4637-4820-8f5d-76b984384435 req-f163fa43-ee2d-452c-86a4-1621361dce53 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Neutron deleted interface 332199ea-71d3-4b36-8f64-bdad19027689; detaching it from the instance and deleting it from the info cache [ 1126.275736] env[69982]: DEBUG nova.network.neutron [req-85fff372-4637-4820-8f5d-76b984384435 req-f163fa43-ee2d-452c-86a4-1621361dce53 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.345422] env[69982]: DEBUG nova.network.neutron [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.401944] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Completed reading data from the image iterator. 
{{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1126.401944] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1126.401944] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f956517a-c196-4a82-abfc-fde9b7faafd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.408502] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f8d03a4-5373-4c39-9d09-2523f0fa4b46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.415372] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1126.415531] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1126.422054] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-36ab0863-8943-465e-b113-179379174a38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.427980] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2670c07-b1e0-4cfa-9f15-83268e373d3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.443015] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ae9af55b-1464-463c-b190-6a35edf60276 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.284s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.470709] env[69982]: DEBUG nova.compute.manager [req-f59968ad-a3c3-4d6e-970a-194fb5f838ee req-4c626d5a-1d16-4253-91e9-7bf6451d6492 service nova] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Detach interface failed, port_id=bef3a859-8f07-4807-8007-407dd8247fe3, reason: Instance d10aaf26-7100-4313-bd57-d2cfefb16e3f could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1126.541995] env[69982]: DEBUG nova.compute.utils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1126.543673] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1126.543945] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1126.553573] env[69982]: INFO nova.compute.claims [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.623769] env[69982]: DEBUG nova.policy [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99858c5fbda7454cab0188cf368e51f7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '83b53a0998874810b5302415624592cd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1126.675763] env[69982]: DEBUG oslo_vmware.rw_handles [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5265023b-6522-0b93-76e8-6d4912534816/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1126.675934] env[69982]: INFO nova.virt.vmwareapi.images [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Downloaded image file data 07979a20-baf7-482a-918a-853610d09226 [ 1126.677463] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df52254-3ce8-4937-aab4-4044bf2284f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.680818] env[69982]: DEBUG nova.network.neutron [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.706029] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55ca3096-b32f-481b-a117-753af1ac2024 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.737728] env[69982]: INFO nova.virt.vmwareapi.images [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] The imported VM was unregistered [ 1126.741193] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1126.741466] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Creating directory with path [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1126.742238] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-342ec8d9-c1c2-4ce3-b3bf-ea5c27419436 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.761124] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Created directory with path [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1126.761345] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164/OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164.vmdk to [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk. 
{{(pid=69982) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1126.761643] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-464520b6-c4e6-4c44-9b0a-7e245d8ea9b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.769756] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1126.769756] env[69982]: value = "task-3865351" [ 1126.769756] env[69982]: _type = "Task" [ 1126.769756] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.781083] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.781402] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64885002-7226-4317-b6cb-61ab4248947b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.792809] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387154e1-0716-4211-bc9b-1ac7068a0b39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.826440] env[69982]: DEBUG nova.compute.manager [req-85fff372-4637-4820-8f5d-76b984384435 req-f163fa43-ee2d-452c-86a4-1621361dce53 service nova] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Detach interface failed, port_id=332199ea-71d3-4b36-8f64-bdad19027689, reason: Instance f8107863-4eb1-4b8e-937e-30dc1e276f33 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1126.850090] env[69982]: INFO nova.compute.manager [-] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Took 1.32 seconds to deallocate network for instance. [ 1127.049261] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1127.063863] env[69982]: INFO nova.compute.resource_tracker [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating resource usage from migration b5bad333-1770-474d-ba7d-7b8924a8217b [ 1127.120788] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.121118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.121563] env[69982]: DEBUG nova.objects.instance [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid cf08cf32-f3d4-494f-a51b-a40616e76429 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.125158] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Successfully created port: d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1127.183897] env[69982]: INFO nova.compute.manager [-] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Took 1.44 seconds to deallocate network for instance. [ 1127.282422] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.360499] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.445050] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.445444] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.445444] env[69982]: DEBUG nova.network.neutron [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1127.462873] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999942d3-5639-47d1-9c33-e536d645a64c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.472937] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0f1ac1-356c-45f1-b71a-fc653f29454d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.518820] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8359537-c573-4474-8028-e3b1c9519787 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.527988] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6694079-2bb8-44e8-8aa4-cf19c81d39b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.544791] env[69982]: DEBUG nova.compute.provider_tree [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1127.692451] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.713515] env[69982]: DEBUG 
nova.objects.instance [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid cf08cf32-f3d4-494f-a51b-a40616e76429 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.781905] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.050390] env[69982]: DEBUG nova.scheduler.client.report [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.064412] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1128.099251] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1128.099582] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.099739] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 
1128.099926] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.100089] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1128.100244] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1128.100456] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1128.100676] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1128.100913] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1128.101118] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1128.101299] env[69982]: DEBUG nova.virt.hardware [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1128.102269] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f6a75b-18c9-4500-bab4-d7dacf34cb99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.111906] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8456164b-1742-4080-9fc4-167d2fae3785 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.196833] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock 
"6d390a12-bfb4-4d91-9e83-a81560a08e1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.197170] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.197487] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.197881] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.198142] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.201435] env[69982]: INFO nova.compute.manager [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Terminating instance [ 1128.216964] env[69982]: DEBUG nova.objects.base [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1128.217199] env[69982]: DEBUG nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1128.283708] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.285465] env[69982]: DEBUG nova.policy [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.316506] env[69982]: DEBUG nova.network.neutron [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.558447] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.531s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.558741] env[69982]: INFO nova.compute.manager [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Migrating [ 1128.567181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.305s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.567445] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.569670] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.173s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.571289] env[69982]: INFO nova.compute.claims [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1128.624025] env[69982]: INFO nova.scheduler.client.report [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocations for instance fc46eca6-6a60-477c-98de-a2e1f6c7e88b [ 1128.707237] env[69982]: DEBUG nova.compute.manager [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1128.707636] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1128.709647] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbffc15-9035-4dcb-a0cb-7b4e3778413d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.724803] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1128.726056] env[69982]: DEBUG nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Successfully created port: b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.728171] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20a66b35-1222-49b7-afe2-49c93533db43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.736925] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1128.736925] env[69982]: value = "task-3865353" [ 1128.736925] env[69982]: _type = "Task" [ 1128.736925] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.749024] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865353, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.782743] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.819258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.007347] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Successfully updated port: d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1129.047302] env[69982]: DEBUG nova.compute.manager [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Received event network-vif-plugged-d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.047529] env[69982]: DEBUG oslo_concurrency.lockutils [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] Acquiring lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.047745] env[69982]: DEBUG oslo_concurrency.lockutils [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.047944] env[69982]: DEBUG oslo_concurrency.lockutils [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.049100] env[69982]: DEBUG nova.compute.manager [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] No waiting events found dispatching network-vif-plugged-d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1129.049410] env[69982]: WARNING nova.compute.manager [req-3b0f8f86-c41f-4626-95e4-39396c08e9a1 req-c644ed28-ccd3-4d22-ba12-c27eba88cd2a service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Received unexpected event network-vif-plugged-d1958f5a-bc24-4a57-b409-5250e0302301 for instance with vm_state building and task_state spawning. 
[ 1129.083364] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.083683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.083929] env[69982]: DEBUG nova.network.neutron [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1129.128059] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.128327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.133287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a413594b-e7ac-4a65-8bdc-2cf81237cbc4 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fc46eca6-6a60-477c-98de-a2e1f6c7e88b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.313s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.247585] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865353, 'name': PowerOffVM_Task, 'duration_secs': 0.33999} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.247889] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.248081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.248480] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0992fa6-84b0-4b33-9fd6-068a67a8c458 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.283189] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.350749] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d89499-e568-40e5-819b-9f209c4dcbf1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.373047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446f3411-e6cd-4170-bd3e-e1e3709d731d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.376078] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.376297] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.376474] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleting the datastore file [datastore2] 6d390a12-bfb4-4d91-9e83-a81560a08e1a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.376728] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db902626-2c18-4990-9f36-da2a9dcaf6ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.383124] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 
tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1129.388054] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1129.388054] env[69982]: value = "task-3865355" [ 1129.388054] env[69982]: _type = "Task" [ 1129.388054] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.396952] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.511922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.512220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.512416] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1129.634176] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1129.784102] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865351, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.906458} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.784454] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164/OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164.vmdk to [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk. 
[ 1129.784639] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Cleaning up location [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1129.785014] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_ba36b660-c1f5-4ba9-a93b-e82ec6f8d164 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.785104] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e450213f-a8fe-44e0-92ee-e1b6ac39083b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.793277] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1129.793277] env[69982]: value = "task-3865356" [ 1129.793277] env[69982]: _type = "Task" [ 1129.793277] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.806385] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865356, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.857653] env[69982]: DEBUG nova.network.neutron [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [{"id": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "address": "fa:16:3e:21:93:f4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3a7809c-bd", "ovs_interfaceid": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.891749] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.892467] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3454af36-6ade-4ff5-9f4c-17f784bd3573 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.905177] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865355, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.910348] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1129.910348] env[69982]: value = "task-3865357" [ 1129.910348] env[69982]: _type = "Task" [ 1129.910348] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.920931] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865357, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.925065] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e33e6a-6596-44ab-835a-819de179c8b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.935459] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518ba376-33e8-4903-a419-b7b9d35452a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.973479] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309351c2-b7ea-43bc-ba9d-fbe11460b453 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.985356] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28496b7-eefe-4d6a-b044-959542eacab1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.007855] env[69982]: DEBUG nova.compute.provider_tree [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.047043] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1130.167859] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.254916] env[69982]: DEBUG nova.network.neutron [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Updating instance_info_cache with network_info: [{"id": "d1958f5a-bc24-4a57-b409-5250e0302301", "address": "fa:16:3e:c5:80:be", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1958f5a-bc", "ovs_interfaceid": "d1958f5a-bc24-4a57-b409-5250e0302301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.306786] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865356, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120345} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.307109] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.307276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.307556] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk to [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1130.307820] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a111fbca-bc26-4c81-8a89-b0bca388ac5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.315279] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1130.315279] env[69982]: value = "task-3865358" [ 1130.315279] env[69982]: _type = "Task" [ 1130.315279] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.324936] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.363384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.403131] env[69982]: DEBUG oslo_vmware.api [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865355, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.776917} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.403413] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1130.403593] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1130.403781] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1130.403980] env[69982]: INFO nova.compute.manager [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1130.404245] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.404442] env[69982]: DEBUG nova.compute.manager [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1130.404536] env[69982]: DEBUG nova.network.neutron [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1130.424323] env[69982]: DEBUG oslo_vmware.api [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865357, 'name': PowerOnVM_Task, 'duration_secs': 0.431129} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.424628] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.424844] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ab4a7ce2-b30a-4bdc-b6cc-f3fad23675d0 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance '4597a0b8-2c04-4755-8e0d-e00e5cdaacd7' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1130.513779] env[69982]: DEBUG nova.scheduler.client.report [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.673397] env[69982]: DEBUG nova.compute.manager [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-vif-plugged-b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1130.673695] env[69982]: DEBUG oslo_concurrency.lockutils [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.673968] env[69982]: DEBUG oslo_concurrency.lockutils [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.674275] env[69982]: DEBUG oslo_concurrency.lockutils [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.674450] env[69982]: DEBUG nova.compute.manager [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] No waiting events found dispatching network-vif-plugged-b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 1130.674645] env[69982]: WARNING nova.compute.manager [req-b994c985-3b38-47d7-8b1f-e4d695336128 req-81db76a1-2db6-4c7c-b468-5312583885d0 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received unexpected event network-vif-plugged-b3ad6672-fd65-45f5-8885-0d0a36722a5e for instance with vm_state active and task_state None. [ 1130.758063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.758261] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Instance network_info: |[{"id": "d1958f5a-bc24-4a57-b409-5250e0302301", "address": "fa:16:3e:c5:80:be", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1958f5a-bc", "ovs_interfaceid": "d1958f5a-bc24-4a57-b409-5250e0302301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1130.758546] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:80:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '245efab9-c420-438e-a0b8-906357ef62c1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1958f5a-bc24-4a57-b409-5250e0302301', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1130.766454] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1130.766711] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1130.766951] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f84c12d-1c7b-49a3-a6bb-2b588372ce00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.788326] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1130.788326] env[69982]: value = "task-3865360" [ 1130.788326] env[69982]: _type = "Task" [ 1130.788326] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.797739] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865360, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.826199] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.019060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.019060] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1131.030156] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.700s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.031030] env[69982]: DEBUG nova.objects.instance [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid 9c0d0f4f-9e88-4e67-99d9-d957652587cd {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1131.094389] env[69982]: DEBUG nova.compute.manager [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Received event network-changed-d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.094389] env[69982]: DEBUG nova.compute.manager [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Refreshing instance network info cache due to event network-changed-d1958f5a-bc24-4a57-b409-5250e0302301. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1131.095585] env[69982]: DEBUG oslo_concurrency.lockutils [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] Acquiring lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.095936] env[69982]: DEBUG oslo_concurrency.lockutils [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] Acquired lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.097914] env[69982]: DEBUG nova.network.neutron [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Refreshing network info cache for port d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1131.241932] env[69982]: DEBUG nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Successfully updated port: b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.301402] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865360, 'name': CreateVM_Task, 'duration_secs': 0.43876} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.301600] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1131.302349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.302537] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.303047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1131.303312] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-619b3b04-3f24-403d-854b-caf8c64a0044 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.310665] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1131.310665] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522fa8e1-44d7-b2d0-6115-f688301757d4" [ 1131.310665] env[69982]: _type = "Task" [ 1131.310665] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.324701] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522fa8e1-44d7-b2d0-6115-f688301757d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.331432] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.538132] env[69982]: DEBUG nova.compute.utils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1131.541624] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1131.617102] env[69982]: DEBUG nova.network.neutron [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.746565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.746882] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.747885] env[69982]: DEBUG nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1131.824822] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522fa8e1-44d7-b2d0-6115-f688301757d4, 'name': SearchDatastore_Task, 'duration_secs': 0.016437} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.828184] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.828500] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1131.828743] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1131.828893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.829092] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1131.829926] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29bed202-cbbc-4fbf-883f-193abf66c817 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.836018] env[69982]: DEBUG nova.network.neutron [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Updated VIF entry in instance network info cache for port d1958f5a-bc24-4a57-b409-5250e0302301. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1131.836700] env[69982]: DEBUG nova.network.neutron [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Updating instance_info_cache with network_info: [{"id": "d1958f5a-bc24-4a57-b409-5250e0302301", "address": "fa:16:3e:c5:80:be", "network": {"id": "64c15f88-c624-41f9-9fdb-be8a3c83d52e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-343931246-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "83b53a0998874810b5302415624592cd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "245efab9-c420-438e-a0b8-906357ef62c1", "external-id": "nsx-vlan-transportzone-959", "segmentation_id": 959, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1958f5a-bc", "ovs_interfaceid": "d1958f5a-bc24-4a57-b409-5250e0302301", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.837836] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.843340] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4419c6-0c5a-407b-82b0-c81a083af0da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.847571] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1131.847786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1131.849632] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c491e5-79a4-4d2a-808e-7b5335144085 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.856238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728273fd-0d94-411c-8831-840077ebfe59 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.861406] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1131.861406] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c85e3c-62b3-8cb7-4418-a75aeb8320e9" [ 1131.861406] env[69982]: _type = "Task" [ 1131.861406] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.897481] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9c1b2b-2995-497d-b15c-9ea31904a8c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.904636] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c85e3c-62b3-8cb7-4418-a75aeb8320e9, 'name': SearchDatastore_Task, 'duration_secs': 0.013662} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.905515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2de8741-03ea-4328-be31-eba8f173539d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.909803] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75321ae3-ecb8-4102-902f-b1c1f35e23ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.918832] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33a58f2-d98d-47ce-8c21-a1fc3b25284e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.937100] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1131.937100] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5206ebd0-adfc-2d7a-f070-4fba010ec84c" [ 1131.937100] env[69982]: _type = "Task" [ 1131.937100] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.937548] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.960823] env[69982]: DEBUG nova.compute.provider_tree [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.971535] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5206ebd0-adfc-2d7a-f070-4fba010ec84c, 'name': SearchDatastore_Task, 'duration_secs': 0.015791} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.973967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.973967] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fea9d096-ee82-4ad1-a799-ef7aaf5026a2/fea9d096-ee82-4ad1-a799-ef7aaf5026a2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1131.973967] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96feea04-8c57-4139-91ff-55b3baa45737 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.984022] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1131.984022] env[69982]: value = "task-3865361" [ 1131.984022] env[69982]: _type = "Task" [ 1131.984022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.992909] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865361, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.042193] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1132.120789] env[69982]: INFO nova.compute.manager [-] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Took 1.72 seconds to deallocate network for instance. [ 1132.288766] env[69982]: WARNING nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. ignoring it [ 1132.331492] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.339381] env[69982]: DEBUG oslo_concurrency.lockutils [req-5580df80-c96d-4a25-a85c-5fdd5908b0e3 req-bc47facd-6a45-4e66-b7bc-f4cb25ee4809 service nova] Releasing lock "refresh_cache-fea9d096-ee82-4ad1-a799-ef7aaf5026a2" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.445905] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1132.445905] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48016f7f-3a28-49bf-a6da-0b95cf63405a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.455129] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1132.455129] env[69982]: value = "task-3865362" [ 1132.455129] env[69982]: _type = "Task" [ 1132.455129] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.474027] env[69982]: DEBUG nova.scheduler.client.report [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.474821] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.496637] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865361, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.629258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.696839] env[69982]: DEBUG nova.network.neutron [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, 
{"id": "b3ad6672-fd65-45f5-8885-0d0a36722a5e", "address": "fa:16:3e:ab:cf:9a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3ad6672-fd", "ovs_interfaceid": "b3ad6672-fd65-45f5-8885-0d0a36722a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.827640] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.965534] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865362, 'name': PowerOffVM_Task, 'duration_secs': 0.504315} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.965922] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1132.966179] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.977088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.947s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.979361] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.741s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.980817] env[69982]: INFO nova.compute.claims [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1132.993086] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865361, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661882} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.993392] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] fea9d096-ee82-4ad1-a799-ef7aaf5026a2/fea9d096-ee82-4ad1-a799-ef7aaf5026a2.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1132.993633] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1132.993923] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-77e06ef3-48ff-4661-92be-e68b12dee02c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.001159] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1133.001159] env[69982]: value = "task-3865364" [ 1133.001159] env[69982]: _type = "Task" [ 1133.001159] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.002186] env[69982]: INFO nova.scheduler.client.report [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance 9c0d0f4f-9e88-4e67-99d9-d957652587cd [ 1133.016064] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865364, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.056103] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None 
req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.088129] env[69982]: DEBUG nova.virt.hardware [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.089429] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2067507-701d-4caa-ad56-6a9a6787a513 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.098881] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34669ff-d70e-4054-b7ec-fe242b478645 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.113734] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.120395] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Creating folder: Project (08f4107a79084944ad23b4d5878944a3). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1133.121032] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43f6fc89-2e52-4f85-b6a8-2664e1ec82de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.131703] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Created folder: Project (08f4107a79084944ad23b4d5878944a3) in parent group-v767796. [ 1133.132168] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Creating folder: Instances. Parent ref: group-v768076. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1133.132168] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba3bb19b-51ea-48d6-b7b3-64a570afeec6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.141613] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Created folder: Instances in parent group-v768076. [ 1133.141777] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1133.141995] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1133.142236] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1a1373c7-2ff7-4979-b3d5-a16e7ec40beb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.159616] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.159616] env[69982]: value = "task-3865367" [ 1133.159616] env[69982]: _type = "Task" [ 1133.159616] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.167920] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865367, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.172972] env[69982]: DEBUG nova.compute.manager [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Received event network-vif-deleted-a8e19350-a6cb-4da2-a745-4a03db30a50b {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1133.173195] env[69982]: DEBUG nova.compute.manager [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-changed-b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1133.173368] env[69982]: DEBUG nova.compute.manager [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing instance network info cache due to event network-changed-b3ad6672-fd65-45f5-8885-0d0a36722a5e. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1133.173554] env[69982]: DEBUG oslo_concurrency.lockutils [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.199580] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.200337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.200510] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.200879] env[69982]: DEBUG oslo_concurrency.lockutils [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.201089] env[69982]: DEBUG nova.network.neutron [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Refreshing network info cache for port b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1133.203142] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03de5c76-f9cb-4765-88f0-b74833b1ed6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.221375] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.221635] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.221824] env[69982]: DEBUG nova.compute.manager [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199
tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Going to confirm migration 6 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1133.223615] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.223898] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.224088] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.224282] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.224429] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.224578] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.224791] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.224953] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.225146] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 
tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.225338] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.225516] env[69982]: DEBUG nova.virt.hardware [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.232063] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfiguring VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1133.233327] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c3c95aa-c005-4cbd-8b29-f569050f3c3f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.252755] env[69982]: DEBUG oslo_vmware.api [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1133.252755] env[69982]: value = "task-3865368" [ 1133.252755] env[69982]: _type = "Task" [ 1133.252755] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.261893] env[69982]: DEBUG oslo_vmware.api [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865368, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.328013] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865358, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.53475} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.328290] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/07979a20-baf7-482a-918a-853610d09226/07979a20-baf7-482a-918a-853610d09226.vmdk to [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1133.329150] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b47e222-3875-40e6-8d35-2b93ed2dc581 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.352675] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.353399] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-541da309-a5d6-4f97-892d-a1375ebc86c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.377893] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1133.377893] env[69982]: value = "task-3865369" [ 1133.377893] env[69982]: _type = "Task" [ 1133.377893] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.390017] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865369, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.472768] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1133.473059] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1133.473162] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1133.473331] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1133.473474] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1133.473621] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1133.473888] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1133.473982] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1133.474173] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible 
topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1133.474326] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1133.474499] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1133.479709] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc848789-7b23-4cc5-ae8c-4c75aa3e6f5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.500062] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1133.500062] env[69982]: value = "task-3865370" [ 1133.500062] env[69982]: _type = "Task" [ 1133.500062] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.517316] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865370, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.521345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3470cbe8-dfe3-45df-baf9-749a6628e5c9 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "9c0d0f4f-9e88-4e67-99d9-d957652587cd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.627s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.522656] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07668} completed successfully.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.523502] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1133.524580] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-557f58d5-27ad-4a13-8905-a91fa78b6521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.550585] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] fea9d096-ee82-4ad1-a799-ef7aaf5026a2/fea9d096-ee82-4ad1-a799-ef7aaf5026a2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.551596] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85bf307d-3009-4603-baf4-d8751731cdbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.573295] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1133.573295] env[69982]: value = "task-3865371" [ 1133.573295] env[69982]: _type = "Task" [ 1133.573295] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.582741] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865371, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.670738] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865367, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.766061] env[69982]: DEBUG oslo_vmware.api [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865368, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.850965] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.851309] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.851627] env[69982]: DEBUG nova.network.neutron [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1133.851948] env[69982]: DEBUG nova.objects.instance [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'info_cache' on Instance uuid 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.897026] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865369, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.012059] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865370, 'name': ReconfigVM_Task, 'duration_secs': 0.441264} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.012059] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1134.024491] env[69982]: DEBUG nova.network.neutron [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updated VIF entry in instance network info cache for port b3ad6672-fd65-45f5-8885-0d0a36722a5e. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1134.024982] env[69982]: DEBUG nova.network.neutron [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b3ad6672-fd65-45f5-8885-0d0a36722a5e", "address": "fa:16:3e:ab:cf:9a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3ad6672-fd", "ovs_interfaceid": "b3ad6672-fd65-45f5-8885-0d0a36722a5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.089537] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865371, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.175503] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865367, 'name': CreateVM_Task, 'duration_secs': 0.775351} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.176014] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1134.176663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.176663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.176949] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1134.177211] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f36dbddf-4114-4e48-ad47-8efb8d27ca61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.183525] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1134.183525] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52225a9a-822c-629e-6b60-f44507d0d750" [ 1134.183525] env[69982]: _type = "Task" [ 1134.183525] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.201769] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52225a9a-822c-629e-6b60-f44507d0d750, 'name': SearchDatastore_Task, 'duration_secs': 0.012657} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.202271] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.202738] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.203201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.203454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.203737] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.208066] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-144f4a1e-6b3a-4739-9160-d7bad3ae9cb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.221708] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.221992] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1134.225566] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8046fb-446b-4184-84bd-a49079ae5dc7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.233060] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1134.233060] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255cf90-c568-ace1-f720-5117bb8d3d4f" [ 1134.233060] env[69982]: _type = "Task" [ 1134.233060] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.242594] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255cf90-c568-ace1-f720-5117bb8d3d4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.272290] env[69982]: DEBUG oslo_vmware.api [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865368, 'name': ReconfigVM_Task, 'duration_secs': 0.79938} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.272349] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.272707] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfigured VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1134.305978] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218d994a-41c3-4c1f-a738-a731d536bdc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.317197] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c79b19-745d-4ffa-acb8-5c0058fac1a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.352107] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741f8b4f-4f98-41c6-a391-7653ac92abaa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.364041] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00d4143-160a-4db6-afed-5e87b4c423e8 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.382412] env[69982]: DEBUG nova.compute.provider_tree [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1134.392425] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865369, 'name': ReconfigVM_Task, 'duration_secs': 0.911547} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.396026] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Reconfigured VM instance instance-0000004c to attach disk [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b/ad43c35a-69bc-4c84-8869-cfde6f516b9b.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.396026] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1db8b1e3-c02a-4727-b387-ff6561d45227 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.402236] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1134.402236] env[69982]: value = "task-3865372" [ 1134.402236] env[69982]: _type = "Task" [ 1134.402236] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.413432] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865372, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.513425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.513716] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.519071] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1134.519352] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1134.519529] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1134.519748] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1134.520032] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1134.520195] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1134.520429] env[69982]: DEBUG
nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1134.520642] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1134.520861] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1134.521058] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1134.521247] env[69982]: DEBUG nova.virt.hardware [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1134.527550] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1134.528744] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-558b8008-22e8-41de-b317-1e735aed88ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.543842] env[69982]: DEBUG oslo_concurrency.lockutils [req-7c1aade2-a625-4968-9320-799ea072ffa7 req-7680f11c-7f05-4521-bd73-657b57542bcb service nova] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.552775] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1134.552775] env[69982]: value = "task-3865373" [ 1134.552775] env[69982]: _type = "Task" [ 1134.552775] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.566364] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865373, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.591561] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865371, 'name': ReconfigVM_Task, 'duration_secs': 0.841354} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.591930] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Reconfigured VM instance instance-00000065 to attach disk [datastore2] fea9d096-ee82-4ad1-a799-ef7aaf5026a2/fea9d096-ee82-4ad1-a799-ef7aaf5026a2.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1134.592741] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bd0bf58-d8fb-48a5-9097-a236632f9115 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.601647] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1134.601647] env[69982]: value = "task-3865374" [ 1134.601647] env[69982]: _type = "Task" [ 1134.601647] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.618974] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865374, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.745972] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5255cf90-c568-ace1-f720-5117bb8d3d4f, 'name': SearchDatastore_Task, 'duration_secs': 0.015009} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.746899] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dbf4ece-05e5-43e5-a35f-3024748ca2d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.753836] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1134.753836] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cf5f7-f2b8-401d-dd72-358ad2edab3e" [ 1134.753836] env[69982]: _type = "Task" [ 1134.753836] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.763038] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cf5f7-f2b8-401d-dd72-358ad2edab3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.783235] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5d0455a0-bedf-46ec-b6e0-dd9aab1b9a02 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-None" "released" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: held 7.662s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.888847] env[69982]: DEBUG nova.scheduler.client.report [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.915475] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865372, 'name': Rename_Task, 'duration_secs': 0.247872} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.916282] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1134.916594] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ebde73ee-173a-4ac5-8446-5b56a158beb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.925664] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1134.925664] env[69982]: value = "task-3865375" [ 1134.925664] env[69982]: _type = "Task" [ 1134.925664] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.936959] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865375, 'name': PowerOnVM_Task} progress is 0%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.016926] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.065166] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865373, 'name': ReconfigVM_Task, 'duration_secs': 0.216886} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.065166] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1135.065644] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a8ea80-2e2b-48c7-aaaf-58d87e15df28 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.096481] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1135.096827] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88283713-719c-4db8-9ad0-87b887a13b8d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.124863] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865374, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.126636] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1135.126636] env[69982]: value = "task-3865376" [ 1135.126636] env[69982]: _type = "Task" [ 1135.126636] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.137965] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865376, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.151391] env[69982]: DEBUG nova.network.neutron [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [{"id": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "address": "fa:16:3e:02:d2:06", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc816f035-7c", "ovs_interfaceid": "c816f035-7c9d-47ba-8b3b-29a57ec10561", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.266942] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]522cf5f7-f2b8-401d-dd72-358ad2edab3e, 'name': SearchDatastore_Task, 'duration_secs': 0.028686} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.266942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.266942] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 28518353-0bff-460f-8384-f0376280917d/28518353-0bff-460f-8384-f0376280917d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1135.267259] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27ce22f8-1eb1-4b7c-bb58-d26ce653f07b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.275542] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1135.275542] env[69982]: value = "task-3865377" [ 1135.275542] env[69982]: _type = "Task" [ 1135.275542] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.285222] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.394817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.395695] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1135.400029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.268s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.402336] env[69982]: INFO nova.compute.claims [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1135.438356] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865375, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.545688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.624182] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865374, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.636590] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.654472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.655560] env[69982]: DEBUG nova.objects.instance [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'migration_context' on Instance uuid 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1135.786431] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865377, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.908709] env[69982]: DEBUG nova.compute.utils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1135.914168] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1135.914517] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1135.943367] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865375, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.013384] env[69982]: DEBUG nova.policy [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3eef7e0eac494f6187b7bd324a63be17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cfd55ee4c33142449b7f61928fba77d7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1136.125417] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865374, 'name': Rename_Task, 'duration_secs': 1.205566} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.125859] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1136.126194] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df576a9f-8483-4cc7-a1a5-b5447ccda230 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.139590] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1136.139590] env[69982]: value = "task-3865378" [ 1136.139590] env[69982]: _type = "Task" [ 1136.139590] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.146743] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865376, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.158401] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865378, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.160436] env[69982]: DEBUG nova.objects.base [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Object Instance<4597a0b8-2c04-4755-8e0d-e00e5cdaacd7> lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1136.161423] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfc86a2-1d35-4731-b337-01bbf0f52efc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.189943] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb1bb111-ea0b-40ee-84dd-17bf8bfc8fbd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.200970] env[69982]: DEBUG oslo_vmware.api [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1136.200970] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269607b-fc80-0ad2-0366-6b536e9316f6" [ 1136.200970] env[69982]: _type = "Task" [ 1136.200970] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.213972] env[69982]: DEBUG oslo_vmware.api [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269607b-fc80-0ad2-0366-6b536e9316f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.293910] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.982078} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.294833] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 28518353-0bff-460f-8384-f0376280917d/28518353-0bff-460f-8384-f0376280917d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1136.296200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.296200] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b072e31-ede9-4fbd-93ef-d21467193142 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.307729] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1136.307729] env[69982]: value = "task-3865379" [ 1136.307729] env[69982]: _type = "Task" [ 1136.307729] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.320588] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.415549] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1136.439850] env[69982]: DEBUG oslo_vmware.api [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865375, 'name': PowerOnVM_Task, 'duration_secs': 1.04908} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.439931] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1136.485332] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Successfully created port: 26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1136.589585] env[69982]: DEBUG nova.compute.manager [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1136.590828] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c4c6f9-3b9f-4270-afe9-f60db3bddd1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.642219] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865376, 'name': ReconfigVM_Task, 'duration_secs': 1.063684} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.642524] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a/642b42eb-eeef-401c-8feb-032d783c645a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1136.642811] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1136.659391] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865378, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.714648] env[69982]: DEBUG oslo_vmware.api [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5269607b-fc80-0ad2-0366-6b536e9316f6, 'name': SearchDatastore_Task, 'duration_secs': 0.015559} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.715076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.765254] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2b1eb8-9e5b-46f0-bf43-4fa704673592 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.776318] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e8a185-b039-4444-896c-f5d01cb74388 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.781494] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-b3ad6672-fd65-45f5-8885-0d0a36722a5e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.781494] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-b3ad6672-fd65-45f5-8885-0d0a36722a5e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.818453] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe7e7d9-98cf-4e6f-bc0e-f855ffa695f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.829554] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078504} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.832634] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.833606] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8830ff5a-08a6-490f-ab79-c0ff8b0a7f1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.837174] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4375ed5e-fc61-4f75-8d65-4975ae249c5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.853342] env[69982]: DEBUG nova.compute.provider_tree [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1136.871231] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 28518353-0bff-460f-8384-f0376280917d/28518353-0bff-460f-8384-f0376280917d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.873256] env[69982]: DEBUG nova.scheduler.client.report [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.876673] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c3d4f3f-95f6-4c5e-a96b-65acc3207b82 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.897755] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.898058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 
tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.902716] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1136.902716] env[69982]: value = "task-3865380" [ 1136.902716] env[69982]: _type = "Task" [ 1136.902716] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.914605] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865380, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.108890] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6869c107-3c35-4e93-98a2-162af280e16f tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 43.105s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.153164] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b039271b-e31b-4a0f-8851-ea4e6095ed85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.162158] env[69982]: DEBUG oslo_vmware.api [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865378, 'name': PowerOnVM_Task, 'duration_secs': 0.596549} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.175507] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1137.175738] env[69982]: INFO nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1137.175919] env[69982]: DEBUG nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1137.176824] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7e2860-b855-43a7-996a-3f7e32368f71 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.179982] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8585408-f9f3-4aab-a9d1-1126f53e859f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.203529] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1137.285046] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.285208] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.286063] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09473d9-5358-4c02-bbe0-c125fed50fbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.303928] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa06ee0-e28f-4fa8-85a5-bde35746c8f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.331356] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfiguring VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1137.331689] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e204854-5102-4973-ad85-321538968cf0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.352024] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1137.352024] env[69982]: value = "task-3865381" [ 1137.352024] env[69982]: _type = 
"Task" [ 1137.352024] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.360829] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.394531] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.994s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.395116] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1137.397934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 18.493s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.400664] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1137.414158] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.436209] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1137.473498] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1137.473783] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1137.473935] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1137.478016] env[69982]: DEBUG nova.virt.hardware [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1137.478016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23cea884-6e79-4be9-af47-ff6b47f27089 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.487133] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bd9755-1425-423e-ad84-ee30ce9638f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.717770] env[69982]: INFO nova.compute.manager [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Took 37.25 seconds to build instance. [ 1137.775576] env[69982]: DEBUG nova.network.neutron [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Port b3a7809c-bd2b-4433-aab2-dc4c413eff31 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1137.866731] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.914892] env[69982]: DEBUG nova.compute.utils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.920770] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.920770] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.945975] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865380, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.948554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.022058] env[69982]: DEBUG nova.policy [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64533b0ad8894d41bdf9fe921b440063', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '820dcd8333cb4a678ef562e4150518d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1138.057319] env[69982]: DEBUG nova.compute.manager [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Received event network-vif-plugged-26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.057556] env[69982]: DEBUG oslo_concurrency.lockutils [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] Acquiring lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.057784] env[69982]: DEBUG oslo_concurrency.lockutils [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.057956] env[69982]: DEBUG oslo_concurrency.lockutils [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1138.058277] env[69982]: DEBUG nova.compute.manager [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] No waiting events found dispatching network-vif-plugged-26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1138.058463] env[69982]: WARNING nova.compute.manager [req-1324e12a-7d85-416b-ae65-45b00bea5c7e req-d14cbcb5-47e2-44b4-968e-3918c404f123 service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Received unexpected event network-vif-plugged-26a5b972-58d8-44a6-abfb-c79dd1301e05 for instance with vm_state building and task_state spawning. [ 1138.220242] env[69982]: DEBUG oslo_concurrency.lockutils [None req-83db5c16-dd42-4eaf-b82b-15f1eb66802d tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.757s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.317781] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Successfully updated port: 26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1138.368018] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.418818] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Applying migration context for instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 as it has an incoming, in-progress migration 8f21cc6b-0f54-4ba0-b9f2-3dc19533f868. Migration status is confirming {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1138.419196] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Applying migration context for instance 642b42eb-eeef-401c-8feb-032d783c645a as it has an incoming, in-progress migration b5bad333-1770-474d-ba7d-7b8924a8217b. 
Migration status is post-migrating {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1138.420913] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating resource usage from migration 8f21cc6b-0f54-4ba0-b9f2-3dc19533f868 [ 1138.421275] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating resource usage from migration b5bad333-1770-474d-ba7d-7b8924a8217b [ 1138.427429] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1138.438464] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865380, 'name': ReconfigVM_Task, 'duration_secs': 1.046886} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.438614] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 28518353-0bff-460f-8384-f0376280917d/28518353-0bff-460f-8384-f0376280917d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1138.439453] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b150e81-2b5c-44ba-ac53-0e6d1326daec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.449383] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1138.449383] env[69982]: value = "task-3865382" [ 1138.449383] env[69982]: _type = "Task" [ 1138.449383] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.449383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.449383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.449383] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6d390a12-bfb4-4d91-9e83-a81560a08e1a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1138.449383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance cf08cf32-f3d4-494f-a51b-a40616e76429 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.449383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ad43c35a-69bc-4c84-8869-cfde6f516b9b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.449383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.450034] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.450257] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance d10aaf26-7100-4313-bd57-d2cfefb16e3f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1138.450824] env[69982]: WARNING nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance f8107863-4eb1-4b8e-937e-30dc1e276f33 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1138.450977] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 8f21cc6b-0f54-4ba0-b9f2-3dc19533f868 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1138.451113] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.451229] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance fea9d096-ee82-4ad1-a799-ef7aaf5026a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.451342] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration b5bad333-1770-474d-ba7d-7b8924a8217b is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1138.451453] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 642b42eb-eeef-401c-8feb-032d783c645a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.451561] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 28518353-0bff-460f-8384-f0376280917d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.451673] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 1315a51d-6d0f-4e6c-9ae1-6af96b74104f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.451812] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8089e191-85df-46cd-8a6b-415bfd5d6748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1138.467019] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865382, 'name': Rename_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.521628] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Successfully created port: 0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1138.801022] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.801360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.801460] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.822824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.823831] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.824071] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1138.866064] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.958366] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4fd5ea57-dc28-4d56-abbc-53a3c71394bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1138.969181] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865382, 'name': Rename_Task, 'duration_secs': 0.154984} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.969181] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1138.969181] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a43ab4f4-6f73-473c-8585-3f5ae5b47130 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.978172] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1138.978172] env[69982]: value = "task-3865383" [ 1138.978172] env[69982]: _type = "Task" [ 1138.978172] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.987596] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865383, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.368470] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.388904] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1139.444488] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1139.462935] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance e5f7aebd-22a8-47fd-9b73-09791aecae56 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1139.477844] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=<?>,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-05-07T07:02:57Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1139.478114] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1139.478285] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1139.478483] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1139.478672] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1139.478900] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1139.479094] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1139.479291] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1139.479477] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1139.479654] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1139.479860] env[69982]: DEBUG nova.virt.hardware [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1139.480849] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0918133e-1025-47e7-b250-0bb3006d694f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.503113] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865383, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.505317] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4972fe-5c58-4483-bcb5-bd49dfc48ecc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.579539] env[69982]: DEBUG nova.network.neutron [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Updating instance_info_cache with network_info: [{"id": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "address": "fa:16:3e:89:a9:60", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26a5b972-58", "ovs_interfaceid": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.755439] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd526a4-5d29-4747-a3f5-13c25f01aa8a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.762601] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Suspending the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1139.763023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-fd2bb5c6-497b-4fc7-a237-dfb3381aa62e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.770610] env[69982]: DEBUG oslo_vmware.api [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1139.770610] env[69982]: value = "task-3865384" [ 1139.770610] env[69982]: _type = "Task" [ 1139.770610] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.780153] env[69982]: DEBUG oslo_vmware.api [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865384, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.839063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.839063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.839063] env[69982]: DEBUG nova.network.neutron [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1139.866136] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.966199] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9815a4f9-3827-4e83-b897-18edadcac55b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1139.966666] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1139.966915] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3328MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1139.991506] env[69982]: DEBUG oslo_vmware.api [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865383, 'name': PowerOnVM_Task, 'duration_secs': 0.619119} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.991506] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1139.991506] env[69982]: INFO nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Took 6.94 seconds to spawn the instance on the hypervisor. [ 1139.991923] env[69982]: DEBUG nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1139.992980] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae598501-e88a-4453-a1bc-7a45741559fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.004749] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.005017] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.005302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.005507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.005682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2-events" "released" by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.008968] env[69982]: INFO nova.compute.manager [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Terminating instance [ 1140.083423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.084141] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Instance network_info: |[{"id": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "address": "fa:16:3e:89:a9:60", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26a5b972-58", "ovs_interfaceid": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1140.084627] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:a9:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f9be6786-e9a7-4138-b7b5-b7696f6cb1e1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26a5b972-58d8-44a6-abfb-c79dd1301e05', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.092716] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.093775] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.095332] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56fd8f69-5ccf-4f02-bbcf-b49ef050f75f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.113500] env[69982]: DEBUG nova.compute.manager [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Received event network-changed-26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.113693] env[69982]: DEBUG nova.compute.manager [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Refreshing instance network info cache due to event network-changed-26a5b972-58d8-44a6-abfb-c79dd1301e05. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1140.113911] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Acquiring lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.114092] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Acquired lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.114265] env[69982]: DEBUG nova.network.neutron [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Refreshing network info cache for port 26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1140.126304] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.126304] env[69982]: value = "task-3865385" [ 1140.126304] env[69982]: _type = "Task" [ 1140.126304] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.138905] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865385, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.198616] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Successfully updated port: 0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1140.281300] env[69982]: DEBUG oslo_vmware.api [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865384, 'name': SuspendVM_Task} progress is 58%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.308274] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01292c39-6d40-4fb5-804d-fb5201949954 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.316398] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a3548f-55d3-4ed6-8379-1c16bb0d395c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.351692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf1d963-b972-4557-af99-984c1b6edc64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.363101] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df84c8f1-da74-44e4-b8b5-5e8619dcd5b7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.371993] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.385894] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.514994] env[69982]: DEBUG nova.compute.manager [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1140.515208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1140.519753] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163acc8f-2ad1-4c41-a3f8-4e727a906318 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.523190] env[69982]: INFO nova.compute.manager [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Took 32.14 seconds to build instance. 
[ 1140.528993] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1140.529253] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec82942c-af07-4077-a99a-8fa17e2b1745 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.535741] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1140.535741] env[69982]: value = "task-3865386" [ 1140.535741] env[69982]: _type = "Task" [ 1140.535741] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.544783] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.594569] env[69982]: DEBUG nova.network.neutron [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [{"id": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "address": "fa:16:3e:21:93:f4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3a7809c-bd", "ovs_interfaceid": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.636122] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865385, 'name': CreateVM_Task, 'duration_secs': 0.379908} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.636316] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1140.637074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.637218] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.637574] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1140.637839] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-439a1db0-f49b-48b0-950f-333b484cb557 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.644089] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1140.644089] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a587a-8e61-f645-ee59-75f168a5cce0" [ 1140.644089] env[69982]: _type = "Task" [ 1140.644089] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.653076] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a587a-8e61-f645-ee59-75f168a5cce0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.704553] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.704780] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.704921] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1140.783683] env[69982]: DEBUG oslo_vmware.api [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865384, 'name': SuspendVM_Task, 'duration_secs': 1.012197} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.784088] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Suspended the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1140.784333] env[69982]: DEBUG nova.compute.manager [None req-1854abda-5aeb-4b65-beba-064d558d004c tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1140.785174] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43605476-5440-42c7-9dd4-d9831f1f9ba6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.838497] env[69982]: DEBUG nova.network.neutron [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Updated VIF entry in instance network info cache for port 26a5b972-58d8-44a6-abfb-c79dd1301e05. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1140.838868] env[69982]: DEBUG nova.network.neutron [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Updating instance_info_cache with network_info: [{"id": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "address": "fa:16:3e:89:a9:60", "network": {"id": "efcb26fb-0282-4e28-a2f5-8e6818adafa3", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "a1f539316afa4eeb82cb433ea8b6071b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f9be6786-e9a7-4138-b7b5-b7696f6cb1e1", "external-id": "nsx-vlan-transportzone-626", "segmentation_id": 626, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26a5b972-58", "ovs_interfaceid": "26a5b972-58d8-44a6-abfb-c79dd1301e05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.868294] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.891148] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1141.025564] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6cbcaebd-cb67-4803-a751-0cf9e50c1e50 tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 33.651s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.046228] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865386, 'name': PowerOffVM_Task, 'duration_secs': 0.210323} completed successfully.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.046523] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1141.046693] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1141.046953] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0150a27-0393-4962-b7e0-0ead98a7ea00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.097927] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.111876] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1141.112109] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1141.112360] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleting the datastore file [datastore2] fea9d096-ee82-4ad1-a799-ef7aaf5026a2 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1141.112662] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1659539-0d9e-45db-bca8-26e763e30d42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.122607] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for the task: (returnval){ [ 1141.122607] env[69982]: value = "task-3865388" [ 1141.122607] env[69982]: _type = "Task" [ 1141.122607] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.132825] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865388, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.154364] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a587a-8e61-f645-ee59-75f168a5cce0, 'name': SearchDatastore_Task, 'duration_secs': 0.015744} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.155376] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.155624] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.155859] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.156014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.156205] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.156718] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a24b734e-6f73-4d20-aec1-0d8864d5991a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.165746] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] 
Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1141.165938] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1141.166724] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e10f2f6-7df5-46c5-bd13-a206ba89b43e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.171950] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1141.171950] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c67a-147d-f533-da12-a1097436fa29" [ 1141.171950] env[69982]: _type = "Task" [ 1141.171950] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.181972] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c67a-147d-f533-da12-a1097436fa29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.262483] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1141.341883] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Releasing lock "refresh_cache-1315a51d-6d0f-4e6c-9ae1-6af96b74104f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.342207] env[69982]: DEBUG nova.compute.manager [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Received event network-vif-plugged-0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.342411] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.342623] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.342858] env[69982]: DEBUG oslo_concurrency.lockutils [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.342956] env[69982]: DEBUG nova.compute.manager [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] No waiting events found dispatching network-vif-plugged-0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.344156] env[69982]: WARNING nova.compute.manager [req-843ad155-427d-458e-807c-7b509ddefefe req-673f4295-3ae4-42d4-a3ea-a224d9b0730b service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Received unexpected event network-vif-plugged-0d321236-4177-49d0-afe0-f203e679fb0a for instance with vm_state building and task_state spawning. [ 1141.367123] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.396372] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1141.396642] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.999s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.396936] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.037s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.397154] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.399785] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.707s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.399785] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.401267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.234s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.402728] env[69982]: INFO nova.compute.claims [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.430313] env[69982]: INFO nova.scheduler.client.report [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted allocations for instance d10aaf26-7100-4313-bd57-d2cfefb16e3f [ 1141.432201] env[69982]: INFO nova.scheduler.client.report [None 
req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Deleted allocations for instance f8107863-4eb1-4b8e-937e-30dc1e276f33 [ 1141.444058] env[69982]: DEBUG nova.network.neutron [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.620291] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bb77df-9832-467d-8905-9afc6f0bd73f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.623991] env[69982]: DEBUG nova.compute.manager [None req-fe7c8e9b-6f93-4175-b2d3-1dc788b657ee tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1141.624812] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3041b22a-22ba-4fed-bacf-a18d4adee08d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.635683] env[69982]: DEBUG oslo_vmware.api [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Task: {'id': task-3865388, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178478} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.650259] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1141.650483] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1141.650662] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1141.650839] env[69982]: INFO nova.compute.manager [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1141.651176] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.653336] env[69982]: DEBUG nova.compute.manager [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1141.653440] env[69982]: DEBUG nova.network.neutron [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1141.655607] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f927405-fa81-4f33-9525-e0c1ee57af3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.664457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1141.684077] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5289c67a-147d-f533-da12-a1097436fa29, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.684952] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3877d8e-92c7-431c-8e70-59dd570976a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.691030] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1141.691030] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525f1cfe-3918-5836-e26a-52f602c92f9b" [ 1141.691030] env[69982]: _type = "Task" [ 1141.691030] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.702277] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525f1cfe-3918-5836-e26a-52f602c92f9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.707062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "28518353-0bff-460f-8384-f0376280917d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.707298] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.707493] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "28518353-0bff-460f-8384-f0376280917d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.707676] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.707839] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.709621] env[69982]: INFO nova.compute.manager [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Terminating instance [ 1141.867290] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.943076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6537beb7-44ad-4888-aaca-3b06c9c85abd tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "d10aaf26-7100-4313-bd57-d2cfefb16e3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.122s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.944633] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8c553aba-3ac7-4667-906f-b8c774942db3 tempest-MultipleCreateTestJSON-738689597 tempest-MultipleCreateTestJSON-738689597-project-member] Lock "f8107863-4eb1-4b8e-937e-30dc1e276f33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.058s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.948289] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.948961] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Instance network_info: |[{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1141.949906] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:03:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0d321236-4177-49d0-afe0-f203e679fb0a', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1141.964894] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.967559] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1141.968393] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8ec316e-7492-45d0-b0c5-b43aa356bb79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.992100] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1141.992100] env[69982]: value = "task-3865389" [ 1141.992100] env[69982]: _type = "Task" [ 1141.992100] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.000998] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865389, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.128566] env[69982]: DEBUG nova.compute.manager [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Received event network-changed-0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1142.128864] env[69982]: DEBUG nova.compute.manager [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Refreshing instance network info cache due to event network-changed-0d321236-4177-49d0-afe0-f203e679fb0a. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1142.129300] env[69982]: DEBUG oslo_concurrency.lockutils [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.129515] env[69982]: DEBUG oslo_concurrency.lockutils [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.129706] env[69982]: DEBUG nova.network.neutron [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Refreshing network info cache for port 0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1142.159306] env[69982]: INFO nova.compute.manager [None req-fe7c8e9b-6f93-4175-b2d3-1dc788b657ee tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] instance snapshotting [ 1142.159927] env[69982]: DEBUG nova.objects.instance [None req-fe7c8e9b-6f93-4175-b2d3-1dc788b657ee tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lazy-loading 'flavor' on Instance uuid 28518353-0bff-460f-8384-f0376280917d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.169984] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1142.170311] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-157069b2-1d43-441b-b784-d14397234758 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.179071] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1142.179071] env[69982]: value = "task-3865390" [ 1142.179071] env[69982]: _type = "Task" [ 1142.179071] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.188044] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865390, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.202666] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525f1cfe-3918-5836-e26a-52f602c92f9b, 'name': SearchDatastore_Task, 'duration_secs': 0.011174} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.203012] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.203342] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 1315a51d-6d0f-4e6c-9ae1-6af96b74104f/1315a51d-6d0f-4e6c-9ae1-6af96b74104f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1142.203629] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d42ce7bc-3f83-4d24-a5f9-214f5bf06502 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.206079] env[69982]: INFO nova.compute.manager [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Resuming [ 1142.206649] env[69982]: DEBUG nova.objects.instance [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'flavor' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.216852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "refresh_cache-28518353-0bff-460f-8384-f0376280917d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.216924] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquired lock "refresh_cache-28518353-0bff-460f-8384-f0376280917d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.217194] env[69982]: DEBUG nova.network.neutron [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Building network info cache for instance {{(pid=69982) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2074}} [ 1142.218834] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1142.218834] env[69982]: value = "task-3865391" [ 1142.218834] env[69982]: _type = "Task" [ 1142.218834] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.232757] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865391, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.370289] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.435823] env[69982]: DEBUG nova.network.neutron [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.510732] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865389, 'name': CreateVM_Task, 'duration_secs': 0.342422} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.510732] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1142.511318] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.511513] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.511933] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1142.515125] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8acbaf3-75c7-4b4a-b8ea-02b585d1e4eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.522292] env[69982]: DEBUG oslo_vmware.api 
[None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1142.522292] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250f45a-c0b8-9f66-bfff-abc3b0318660" [ 1142.522292] env[69982]: _type = "Task" [ 1142.522292] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.533114] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250f45a-c0b8-9f66-bfff-abc3b0318660, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.669525] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c7a79c-dfd2-4b41-8037-36133c98c880 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.698439] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee2494a-4225-4143-b676-32b17c18c527 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.709107] env[69982]: DEBUG oslo_vmware.api [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865390, 'name': PowerOnVM_Task, 'duration_secs': 0.497301} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.711632] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1142.711826] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5b340278-b9c6-44a2-92f2-cc278fc9cc9a tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance '642b42eb-eeef-401c-8feb-032d783c645a' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1142.741803] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865391, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.755070] env[69982]: DEBUG nova.network.neutron [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1142.777482] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3238c173-00d1-435d-bb65-5dc8e65db78e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.788174] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31e41da-5b5d-4c8a-9f3a-d490a5574ccd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.824593] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7355810f-c770-4351-8c9a-e4b308c2d60a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.832650] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22bcc8f-640a-47e1-b604-c9cbe8c04bb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.846659] env[69982]: DEBUG nova.compute.provider_tree [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.868256] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.882444] env[69982]: DEBUG nova.network.neutron [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.938641] env[69982]: INFO nova.compute.manager [-] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Took 1.28 seconds to deallocate network for instance. [ 1143.038906] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5250f45a-c0b8-9f66-bfff-abc3b0318660, 'name': SearchDatastore_Task, 'duration_secs': 0.060659} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.039241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.039480] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.039722] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.039880] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.040100] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.040382] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eebbb736-cb9f-44d4-8ad0-c8397025e773 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.056188] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.056433] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.057185] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27636eb2-2ca2-4fd8-99d9-f4dab9b6b313 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.063610] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1143.063610] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52649745-6020-1883-1fff-a2dd68d31483" [ 1143.063610] env[69982]: _type = "Task" [ 1143.063610] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.073502] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52649745-6020-1883-1fff-a2dd68d31483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.091431] env[69982]: DEBUG nova.network.neutron [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updated VIF entry in instance network info cache for port 0d321236-4177-49d0-afe0-f203e679fb0a. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1143.091810] env[69982]: DEBUG nova.network.neutron [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.227988] env[69982]: DEBUG nova.compute.manager [None req-fe7c8e9b-6f93-4175-b2d3-1dc788b657ee tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance disappeared during snapshot {{(pid=69982) 
_snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 1143.245829] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865391, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.349953] env[69982]: DEBUG nova.compute.manager [None req-fe7c8e9b-6f93-4175-b2d3-1dc788b657ee tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Found 0 images (rotation: 2) {{(pid=69982) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1143.354510] env[69982]: DEBUG nova.scheduler.client.report [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.375676] env[69982]: DEBUG oslo_vmware.api [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865381, 'name': ReconfigVM_Task, 'duration_secs': 5.795833} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.375934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.376157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Reconfigured VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1143.384642] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Releasing lock "refresh_cache-28518353-0bff-460f-8384-f0376280917d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.385043] env[69982]: DEBUG nova.compute.manager [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1143.385236] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.386081] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc25502-b020-4e79-aeac-76db3b20c026 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.395129] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.395409] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b647477d-8838-4ba2-a852-f922c553b879 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.402653] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1143.402653] env[69982]: value = "task-3865392" [ 1143.402653] env[69982]: _type = "Task" [ 1143.402653] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.412041] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.448233] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.577396] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52649745-6020-1883-1fff-a2dd68d31483, 'name': SearchDatastore_Task, 'duration_secs': 0.014787} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.578256] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d694d7-ee79-4f86-98ae-ca74c582fd4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.584680] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1143.584680] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e834c2-e5c9-53b9-e132-4d1953e2907f" [ 1143.584680] env[69982]: _type = "Task" [ 1143.584680] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.594963] env[69982]: DEBUG oslo_concurrency.lockutils [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.595299] env[69982]: DEBUG nova.compute.manager [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Received event network-vif-deleted-d1958f5a-bc24-4a57-b409-5250e0302301 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.595701] env[69982]: INFO nova.compute.manager [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Neutron deleted interface d1958f5a-bc24-4a57-b409-5250e0302301; detaching it from the instance and deleting it from the info cache [ 1143.595916] env[69982]: DEBUG nova.network.neutron [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.597107] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e834c2-e5c9-53b9-e132-4d1953e2907f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.744667] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.744667] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquired lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.744667] env[69982]: DEBUG nova.network.neutron [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1143.745450] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865391, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.864039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.463s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.864763] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1143.867997] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.239s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.868198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.871727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.326s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.876145] env[69982]: INFO nova.compute.claims [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1143.910650] env[69982]: INFO nova.scheduler.client.report [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted allocations for instance 6d390a12-bfb4-4d91-9e83-a81560a08e1a [ 1143.920995] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865392, 'name': PowerOffVM_Task, 'duration_secs': 0.250952} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.922818] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1143.923043] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1143.923321] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abd2d8ba-1eeb-4fd9-ac2b-e844b3bb6b52 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.954667] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1143.954912] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1143.955121] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Deleting the datastore file [datastore2] 28518353-0bff-460f-8384-f0376280917d {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1143.955869] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf8176bb-1cdf-4e8e-aa3e-98ae68fb2d9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.964023] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for the task: (returnval){ [ 1143.964023] env[69982]: value = "task-3865394" [ 1143.964023] env[69982]: _type = "Task" [ 1143.964023] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.974405] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865394, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.100038] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e834c2-e5c9-53b9-e132-4d1953e2907f, 'name': SearchDatastore_Task, 'duration_secs': 0.030587} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.100332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.100586] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1144.100853] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-345d9097-1d24-4c88-b719-f375c324d4e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.107911] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a8bf0d0-0be1-4e81-a99f-34d9021bac21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.120999] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1083148b-7b05-4748-9243-d3bcdfa21a1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.132507] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1144.132507] env[69982]: value = "task-3865395" [ 1144.132507] env[69982]: _type = "Task" [ 1144.132507] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.142974] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.156396] env[69982]: DEBUG nova.compute.manager [req-b71a06c4-09dc-4878-b8bc-9673d52f8bb5 req-d85a845e-f436-4d09-a1a9-d76ffd71c9c1 service nova] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Detach interface failed, port_id=d1958f5a-bc24-4a57-b409-5250e0302301, reason: Instance fea9d096-ee82-4ad1-a799-ef7aaf5026a2 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1144.160143] env[69982]: DEBUG nova.compute.manager [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-vif-deleted-b3ad6672-fd65-45f5-8885-0d0a36722a5e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.160418] env[69982]: INFO nova.compute.manager [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Neutron deleted interface b3ad6672-fd65-45f5-8885-0d0a36722a5e; detaching it from the instance and deleting it from the info cache [ 1144.160835] env[69982]: DEBUG nova.network.neutron [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.245766] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865391, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.627186} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.247978] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 1315a51d-6d0f-4e6c-9ae1-6af96b74104f/1315a51d-6d0f-4e6c-9ae1-6af96b74104f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1144.248225] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1144.251390] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b918e8a7-dc69-45be-9c34-6151b75b1e9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.259205] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1144.259205] env[69982]: value = "task-3865396" [ 1144.259205] env[69982]: _type = "Task" [ 1144.259205] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.271332] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865396, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.381902] env[69982]: DEBUG nova.compute.utils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1144.385938] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Not allocating networking since 'none' was specified. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1144.434130] env[69982]: DEBUG oslo_concurrency.lockutils [None req-842da7d4-3624-48fe-9c0f-c5245e3301d3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "6d390a12-bfb4-4d91-9e83-a81560a08e1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.237s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.481727] env[69982]: DEBUG oslo_vmware.api [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Task: {'id': task-3865394, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270364} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.482098] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.482372] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.482520] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.482700] env[69982]: INFO nova.compute.manager [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] [instance: 28518353-0bff-460f-8384-f0376280917d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1144.482959] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1144.483235] env[69982]: DEBUG nova.compute.manager [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1144.483338] env[69982]: DEBUG nova.network.neutron [-] [instance: 28518353-0bff-460f-8384-f0376280917d] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1144.516047] env[69982]: DEBUG nova.network.neutron [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1144.556568] env[69982]: DEBUG nova.network.neutron [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [{"id": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "address": "fa:16:3e:e5:93:97", "network": {"id": "9b1d6c9c-50d6-47f5-810e-5511b6f297ca", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1739325662-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "251f58d95d51416d9d9fd54aa14546e2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3cc0a33d-17c0-4b87-b48f-413a87a4cc6a", "external-id": "nsx-vlan-transportzone-865", "segmentation_id": 865, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ded08ab-b7", "ovs_interfaceid": "1ded08ab-b715-4b57-81f3-69d6383c5a74", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.650175] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865395, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.663544] env[69982]: DEBUG oslo_concurrency.lockutils [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.663717] env[69982]: DEBUG oslo_concurrency.lockutils [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] Acquired lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.664900] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611eee87-68a7-4b93-9e88-f85c3b331414 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.694194] env[69982]: DEBUG oslo_concurrency.lockutils [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] Releasing lock "cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.695932] env[69982]: WARNING nova.compute.manager [req-2fa7a798-8e81-4509-8383-a885d05336f7 req-ca023c95-a066-438f-92a8-b1acc79aa0b8 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Detach interface failed, port_id=b3ad6672-fd65-45f5-8885-0d0a36722a5e, reason: No device with interface-id b3ad6672-fd65-45f5-8885-0d0a36722a5e exists on VM: nova.exception.NotFound: No device with interface-id b3ad6672-fd65-45f5-8885-0d0a36722a5e exists on VM [ 1144.771227] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079816} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.771868] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.772422] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5843dae5-b3f3-4f92-a305-3061c5441455 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.798785] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 1315a51d-6d0f-4e6c-9ae1-6af96b74104f/1315a51d-6d0f-4e6c-9ae1-6af96b74104f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.799192] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9236574-9cc0-4283-a1ce-7243ecc6abb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.817944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.818183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.818389] env[69982]: DEBUG nova.network.neutron [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1144.827736] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1144.827736] env[69982]: value = "task-3865397" [ 1144.827736] env[69982]: _type = "Task" [ 1144.827736] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.837678] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865397, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.887933] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1145.019578] env[69982]: DEBUG nova.network.neutron [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.060393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Releasing lock "refresh_cache-ad43c35a-69bc-4c84-8869-cfde6f516b9b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.060662] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed31c7fa-f666-43d7-9833-cb1517c9cfe1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.068634] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Resuming the VM {{(pid=69982) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1145.068785] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8c33c9a-22eb-471e-836c-3521396a4d39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.079871] env[69982]: DEBUG oslo_vmware.api [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1145.079871] env[69982]: value = "task-3865398" [ 1145.079871] env[69982]: _type = "Task" [ 1145.079871] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.090791] env[69982]: DEBUG oslo_vmware.api [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865398, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.114399] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.114715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.114972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.115261] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.115507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.118470] env[69982]: INFO nova.compute.manager [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Terminating instance [ 1145.149476] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582074} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.149778] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1145.150012] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1145.150298] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d253cb92-ca94-4a0c-be64-3ed43fe2e113 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.162057] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1145.162057] env[69982]: value = "task-3865399" [ 1145.162057] env[69982]: _type = "Task" [ 1145.162057] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.173333] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865399, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.227147] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014b7b25-3251-447b-8a05-fb026133c618 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.236110] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003dbb26-9c43-458c-8e57-6ff26ce8ebf4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.272846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9937cb3-93ef-42bb-b0cd-c529d2b3675b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.281892] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be45400c-d1eb-42e8-93d5-394132d9b9df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.297058] env[69982]: DEBUG nova.compute.provider_tree [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.337723] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865397, 'name': ReconfigVM_Task, 'duration_secs': 0.342203} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.338039] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 1315a51d-6d0f-4e6c-9ae1-6af96b74104f/1315a51d-6d0f-4e6c-9ae1-6af96b74104f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1145.339023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2de39d5f-35a8-49e8-912e-98c5a3623b2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.346432] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1145.346432] env[69982]: value = "task-3865400" [ 1145.346432] env[69982]: _type = "Task" [ 1145.346432] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.355710] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865400, 'name': Rename_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.418301] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.418639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.418915] env[69982]: DEBUG nova.compute.manager [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Going to confirm migration 7 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1145.522863] env[69982]: INFO nova.compute.manager [-] [instance: 28518353-0bff-460f-8384-f0376280917d] Took 1.04 seconds to deallocate network for instance. [ 1145.575529] env[69982]: DEBUG nova.network.neutron [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [{"id": "84b43cbd-49c3-4deb-aac7-06329e863173", "address": "fa:16:3e:80:0e:12", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84b43cbd-49", "ovs_interfaceid": "84b43cbd-49c3-4deb-aac7-06329e863173", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.591960] env[69982]: DEBUG oslo_vmware.api [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865398, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.623857] env[69982]: DEBUG nova.compute.manager [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1145.624220] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1145.625872] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb460903-1e9e-4903-ba61-fb5880be8ccd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.636456] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.636885] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7cf137ee-1a63-4f64-a72d-9147cfdfee89 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.645669] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1145.645669] env[69982]: value = "task-3865401" [ 1145.645669] env[69982]: _type = "Task" [ 1145.645669] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.656035] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865401, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.671959] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865399, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07667} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.672304] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1145.673211] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbf01c9-946c-4bf0-8ee5-fe107829774b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.701563] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1145.702023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5486965-b976-458b-8baa-a8583d55f4d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.724960] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1145.724960] env[69982]: value = "task-3865402" [ 1145.724960] env[69982]: _type = "Task" [ 1145.724960] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.735665] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865402, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.801462] env[69982]: DEBUG nova.scheduler.client.report [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1145.858688] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865400, 'name': Rename_Task, 'duration_secs': 0.170053} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.859116] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.859814] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95495456-4b1a-4828-a57d-6f51df1ce59d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.869707] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1145.869707] env[69982]: value = "task-3865403" [ 1145.869707] env[69982]: _type = "Task" [ 1145.869707] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.880184] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.904844] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1145.945627] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1145.945835] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1145.945992] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1145.947013] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1145.947254] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1145.947423] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1145.947664] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1145.947844] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1145.948057] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d 
tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1145.948250] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1145.950145] env[69982]: DEBUG nova.virt.hardware [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1145.951700] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19981df-492d-44a5-857d-63646ee08dbc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.962575] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ab57b6-93b1-471e-9228-7fd16e84c2c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.978857] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1145.985778] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Creating folder: Project (dd798f5cba7f4e85ac7fc8f71c43e3bc). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1145.986069] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65302414-75f7-4d2a-bd18-52b6135eda4c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.998819] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Created folder: Project (dd798f5cba7f4e85ac7fc8f71c43e3bc) in parent group-v767796. [ 1145.999119] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Creating folder: Instances. Parent ref: group-v768081. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1145.999469] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2768b2c9-aee8-4559-a406-3c3a24bf1503 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.010765] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Created folder: Instances in parent group-v768081. 
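The repeated "Waiting for the task", "progress is N%." and "completed successfully." entries in this trace (for CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task, and the CreateVM_Task that follows) all come from the same poll-until-done pattern around each vCenter task. Below is a minimal Python sketch of that pattern, assuming a hypothetical get_task_info callable in place of the real vCenter property reads; it shows the general shape of the loop only and is not oslo.vmware's actual implementation.

```python
# Minimal sketch (not oslo.vmware's actual code) of the polling pattern behind
# the "Waiting for the task ...", "progress is N%." and "completed successfully."
# entries in this trace. `get_task_info` is a hypothetical callable standing in
# for the vCenter property reads that each poll performs.
import time


class TaskFailed(Exception):
    """Raised when the hypervisor reports the task in the 'error' state."""


def wait_for_task(get_task_info, task_id, interval=0.5):
    """Poll a task until it leaves the queued/running states."""
    while True:
        # e.g. {'name': 'CreateVM_Task', 'state': 'running', 'progress': 25}
        info = get_task_info(task_id)
        if info["state"] == "success":
            print(f"Task {task_id} ({info['name']}) completed successfully.")
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        print(f"Task {task_id} ({info['name']}) progress is "
              f"{info.get('progress', 0)}%.")
        time.sleep(interval)  # the real loop is driven by a looping-call timer
```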
[ 1146.011092] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1146.011330] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1146.011597] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-912b5cbd-3c04-4c28-8bb5-443ecf6f22cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.027828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.027917] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquired lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.028067] env[69982]: DEBUG nova.network.neutron [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1146.028269] env[69982]: DEBUG nova.objects.instance [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'info_cache' on Instance uuid 642b42eb-eeef-401c-8feb-032d783c645a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.034033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.037669] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1146.037669] env[69982]: value = "task-3865406" [ 1146.037669] env[69982]: _type = "Task" [ 1146.037669] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.052021] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865406, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.081450] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.098320] env[69982]: DEBUG oslo_vmware.api [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865398, 'name': PowerOnVM_Task, 'duration_secs': 0.607069} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.098810] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Resumed the VM {{(pid=69982) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1146.099055] env[69982]: DEBUG nova.compute.manager [None req-1ebeabc4-cc00-4294-9725-28a3472bd5ca tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.100466] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16682d28-5336-47e1-8341-ef2ad22e1aaa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.160180] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865401, 'name': PowerOffVM_Task, 'duration_secs': 0.234224} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.160645] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.161160] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1146.161617] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d851324-77f8-43fb-a7cc-1400ebb1075e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.239547] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865402, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.241104] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1146.241445] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1146.241679] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleting the datastore file [datastore2] cf08cf32-f3d4-494f-a51b-a40616e76429 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1146.241990] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-36bb9002-3ec0-414b-8d04-bdd4dc459e41 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.249112] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1146.249112] env[69982]: value = "task-3865408" [ 1146.249112] env[69982]: _type = "Task" [ 1146.249112] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.258353] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.308673] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.437s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.309283] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1146.313824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 9.598s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.382059] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865403, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.552294] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865406, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.583775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-870d6bd2-996e-4941-a5b9-00a6827635e1 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-cf08cf32-f3d4-494f-a51b-a40616e76429-b3ad6672-fd65-45f5-8885-0d0a36722a5e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.803s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.739442] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865402, 'name': ReconfigVM_Task, 'duration_secs': 0.747908} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.740160] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1146.740888] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d5f6ecd-dbd1-4bcd-8ac6-bdfd0a0d40c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.750917] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1146.750917] env[69982]: value = "task-3865409" [ 1146.750917] env[69982]: _type = "Task" [ 1146.750917] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.765765] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865409, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.769162] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.818886] env[69982]: DEBUG nova.compute.utils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1146.826891] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1146.826986] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1146.877772] env[69982]: DEBUG nova.policy [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1146.888708] env[69982]: DEBUG oslo_vmware.api [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865403, 'name': PowerOnVM_Task, 'duration_secs': 0.517764} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.894427] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.894684] env[69982]: INFO nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Took 9.46 seconds to spawn the instance on the hypervisor. 
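Editor's note: the repeated "Task: {'id': ..., 'name': ...} progress is N%" records, followed by "completed successfully" with a duration_secs value, come from oslo.vmware's task poller, which re-reads a vCenter task until it reaches a terminal state. The stdlib-only sketch below approximates that poll-until-done idiom; FakeTask and poll_task are illustrative stand-ins, not the oslo.vmware API.

    import time

    class FakeTask:
        """Illustrative stand-in for a vCenter task handle (not the real API)."""

        def __init__(self, name, steps):
            self.name = name
            self._steps = iter(steps)   # e.g. [0, 25, 100]
            self.progress = 0
            self.state = "running"

        def refresh(self):
            try:
                self.progress = next(self._steps)
            except StopIteration:
                self.state = "success"

    def poll_task(task, interval=0.1):
        """Poll until the task finishes, reporting progress the way the log does."""
        start = time.monotonic()
        while True:
            task.refresh()
            if task.state != "running":
                break
            print("Task: %s progress is %d%%." % (task.name, task.progress))
            time.sleep(interval)
        print("Task: %s completed successfully. duration_secs=%.3f"
              % (task.name, time.monotonic() - start))

    poll_task(FakeTask("PowerOnVM_Task", [0, 25, 100]))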
[ 1146.894973] env[69982]: DEBUG nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.896640] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70cc79e-6e18-4cac-ad95-90d21517dfd0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.055162] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865406, 'name': CreateVM_Task, 'duration_secs': 0.76142} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.055162] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1147.055162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.055162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.055162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1147.055162] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0041162-b1e0-409c-b875-2919b525273d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.063988] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1147.063988] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e9ea4-4f87-e1b8-f15e-beb0006f5902" [ 1147.063988] env[69982]: _type = "Task" [ 1147.063988] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.075500] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e9ea4-4f87-e1b8-f15e-beb0006f5902, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.141024] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1087b67a-d074-4456-9a35-4420fec2f26c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.153717] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32b3a40-eb46-42ba-a489-1c9501c33488 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.192212] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96b7ff4-d5fa-48d3-9097-222d02edf6f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.202524] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7af8d9e-6a9a-403f-bead-f41a56c0069a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.219454] env[69982]: DEBUG nova.compute.provider_tree [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.270356] env[69982]: DEBUG oslo_vmware.api [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.690622} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.272940] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1147.273230] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1147.273831] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1147.273831] env[69982]: INFO nova.compute.manager [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Took 1.65 seconds to destroy the instance on the hypervisor. 
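Editor's note: the "Acquiring lock ... / Lock ... acquired ... waited 0.000s / Lock ... released ... held N.NNNs" records throughout this trace are emitted by oslo.concurrency's lockutils, which Nova uses to serialize critical sections such as the resource tracker's "compute_resources" work and the per-image cache entries above. A minimal sketch of that named-lock idiom, assuming oslo.concurrency is installed; the function name and the UUIDs below are illustrative only.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # Runs only while the named in-process lock is held; concurrent callers
        # queue up, which is what the "waited X.XXXs" / "held Y.YYYs" lines measure.
        print('claiming resources for %s' % instance_uuid)

    claim_resources('e5f7aebd-22a8-47fd-9b73-09791aecae56')

    # The same facility is also available as a context manager for ad-hoc sections,
    # e.g. the "refresh_cache-<uuid>" locks seen in this log.
    with lockutils.lock('refresh_cache-cf08cf32-f3d4-494f-a51b-a40616e76429'):
        print('instance network info cache refreshed')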
[ 1147.273931] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1147.274238] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865409, 'name': Rename_Task, 'duration_secs': 0.339523} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.274399] env[69982]: DEBUG nova.compute.manager [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1147.274509] env[69982]: DEBUG nova.network.neutron [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1147.276261] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1147.276574] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d338edb-676d-44aa-b38d-a243d0923932 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.287499] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1147.287499] env[69982]: value = "task-3865410" [ 1147.287499] env[69982]: _type = "Task" [ 1147.287499] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.297872] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865410, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.298906] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Successfully created port: 299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.327546] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1147.399360] env[69982]: DEBUG nova.network.neutron [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [{"id": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "address": "fa:16:3e:21:93:f4", "network": {"id": "c047a948-fbcd-460b-bab8-964e429eb6b7", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1201822121-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "babff47774384a5ca2d938bcc6331aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56398cc0-e39f-410f-8036-8c2a6870e26f", "external-id": "nsx-vlan-transportzone-612", "segmentation_id": 612, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb3a7809c-bd", "ovs_interfaceid": "b3a7809c-bd2b-4433-aab2-dc4c413eff31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.422885] env[69982]: INFO nova.compute.manager [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Took 33.20 seconds to build instance. [ 1147.576929] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523e9ea4-4f87-e1b8-f15e-beb0006f5902, 'name': SearchDatastore_Task, 'duration_secs': 0.035831} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.577326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.577628] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1147.577876] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.578440] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.578440] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1147.578563] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c90f691-d2ea-45fe-9417-d5b30cb40f10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.589710] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1147.589912] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1147.590794] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45cc9fe9-e680-4f55-ac8d-bd4a4b86efbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.598349] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1147.598349] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529a3e21-9c93-a0ba-2ca4-005c212c4c92" [ 1147.598349] env[69982]: _type = "Task" [ 1147.598349] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.614419] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529a3e21-9c93-a0ba-2ca4-005c212c4c92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.723593] env[69982]: DEBUG nova.scheduler.client.report [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1147.770840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.771155] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.803578] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865410, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.907665] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Releasing lock "refresh_cache-642b42eb-eeef-401c-8feb-032d783c645a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.907969] env[69982]: DEBUG nova.objects.instance [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lazy-loading 'migration_context' on Instance uuid 642b42eb-eeef-401c-8feb-032d783c645a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.929580] env[69982]: DEBUG oslo_concurrency.lockutils [None req-529b0a4c-e740-4f2a-9b73-8d4ad3eec85b tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.728s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.114062] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529a3e21-9c93-a0ba-2ca4-005c212c4c92, 'name': SearchDatastore_Task, 'duration_secs': 0.023336} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.114467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7607cb0-d78c-46af-a37d-75449e9d46a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.121989] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1148.121989] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52113eae-c2f3-0233-1de3-525a96a4bf0e" [ 1148.121989] env[69982]: _type = "Task" [ 1148.121989] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.134356] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52113eae-c2f3-0233-1de3-525a96a4bf0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.274037] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1148.302066] env[69982]: DEBUG oslo_vmware.api [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865410, 'name': PowerOnVM_Task, 'duration_secs': 0.784813} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.302544] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1148.302868] env[69982]: INFO nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Took 8.86 seconds to spawn the instance on the hypervisor. [ 1148.303255] env[69982]: DEBUG nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1148.304458] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd14abed-925d-4819-adbd-771df6a14da6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.342805] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1148.379848] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1148.380102] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.380256] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1148.381018] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.381018] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1148.381018] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1148.381018] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1148.381312] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1148.381312] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1148.381422] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1148.381584] env[69982]: DEBUG nova.virt.hardware [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1148.382726] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a43b603-4396-4f10-b734-107cf9611fb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.391300] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e32b56-ea93-4d34-a26b-225c1d6a338c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.411565] env[69982]: DEBUG nova.objects.base [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Object Instance<642b42eb-eeef-401c-8feb-032d783c645a> lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1148.412433] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4d9009-8db9-4e3f-9d00-dfec5dd2b266 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.436885] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac8f216e-ed5a-496e-8bbd-08628b435f0c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.444515] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1148.444515] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5294b182-29d3-43cf-f1cc-0f62993a5c23" [ 1148.444515] env[69982]: _type = "Task" [ 1148.444515] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.454989] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5294b182-29d3-43cf-f1cc-0f62993a5c23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.632963] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52113eae-c2f3-0233-1de3-525a96a4bf0e, 'name': SearchDatastore_Task, 'duration_secs': 0.033044} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.632963] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.632963] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1148.633395] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-270b86c5-2721-469f-afd7-0c4f2d03bd16 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.642467] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1148.642467] env[69982]: value = "task-3865411" [ 1148.642467] env[69982]: _type = "Task" [ 1148.642467] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.653491] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865411, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.736131] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.422s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.738267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.790s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.744220] env[69982]: INFO nova.compute.claims [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1148.801927] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.839258] env[69982]: INFO nova.compute.manager [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Took 32.72 seconds to build instance. 
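Editor's note: the "Inventory has not changed for provider ..." report a few records up lists totals, reservations and allocation ratios per resource class, and the "Claim successful on node ..." line is the resource tracker consuming part of that capacity. The usable amount of each class is (total - reserved) * allocation_ratio; the short sketch below recomputes those figures from the values quoted in the log (effective_capacity is an illustrative helper, not Nova's implementation).

    # Inventory as reported for provider 206a5498-2e79-46c1-a636-9488a05fb67d.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        """Capacity the scheduler can place against: (total - reserved) * ratio."""
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print('%s: %.0f' % (rc, effective_capacity(inv)))
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400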
[ 1148.846217] env[69982]: DEBUG nova.compute.manager [req-3076c9d6-2730-4397-9fb0-d1fe5ffb8109 req-b6812fbd-9bd0-4a6d-91e8-638c635e5ff7 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Received event network-vif-deleted-84b43cbd-49c3-4deb-aac7-06329e863173 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.846341] env[69982]: INFO nova.compute.manager [req-3076c9d6-2730-4397-9fb0-d1fe5ffb8109 req-b6812fbd-9bd0-4a6d-91e8-638c635e5ff7 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Neutron deleted interface 84b43cbd-49c3-4deb-aac7-06329e863173; detaching it from the instance and deleting it from the info cache [ 1148.846535] env[69982]: DEBUG nova.network.neutron [req-3076c9d6-2730-4397-9fb0-d1fe5ffb8109 req-b6812fbd-9bd0-4a6d-91e8-638c635e5ff7 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.941548] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Successfully updated port: 299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.961078] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5294b182-29d3-43cf-f1cc-0f62993a5c23, 'name': SearchDatastore_Task, 'duration_secs': 0.020561} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.961620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.962983] env[69982]: DEBUG nova.network.neutron [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.155279] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865411, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.299992] env[69982]: DEBUG nova.compute.manager [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Received event network-vif-plugged-299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1149.301973] env[69982]: DEBUG oslo_concurrency.lockutils [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] Acquiring lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.301973] env[69982]: DEBUG oslo_concurrency.lockutils [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.301973] env[69982]: DEBUG oslo_concurrency.lockutils [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.301973] env[69982]: DEBUG nova.compute.manager [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] No waiting events found dispatching network-vif-plugged-299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1149.302509] env[69982]: WARNING nova.compute.manager [req-5be7f680-0334-49d1-841d-ca3690690be4 req-605bc6e1-bae6-41ae-84d2-5ed235b015f0 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Received unexpected event network-vif-plugged-299c46e3-a90a-4290-8d1e-2eb3e4d336db for instance with vm_state building and task_state spawning. 
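Editor's note: the long "Updating instance_info_cache with network_info: [...]" blobs in this trace carry the full cached Neutron port model per instance: port id, MAC address, fixed IPs, MTU and the NSX segmentation id. The sketch below pulls the commonly needed fields out of one such entry, using values quoted from the log for port 299c46e3-a90a-4290-8d1e-2eb3e4d336db; summarize_vif is an illustrative helper, and the entry is trimmed to the keys it reads.

    # One network_info entry, trimmed to the fields used below (values from the log).
    vif = {
        "id": "299c46e3-a90a-4290-8d1e-2eb3e4d336db",
        "address": "fa:16:3e:a4:9c:21",
        "devname": "tap299c46e3-a9",
        "network": {
            "label": "tempest-ServersTestJSON-1747614601-network",
            "subnets": [
                {"cidr": "192.168.128.0/28",
                 "ips": [{"address": "192.168.128.10", "type": "fixed"}]}
            ],
            "meta": {"mtu": 8950},
        },
        "details": {"segmentation_id": 58},
    }

    def summarize_vif(vif):
        """Return the handful of fields most readers of this log actually need."""
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"] if ip["type"] == "fixed"]
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif["devname"],
            "fixed_ips": fixed_ips,
            "mtu": vif["network"]["meta"]["mtu"],
            "segmentation_id": vif["details"]["segmentation_id"],
        }

    print(summarize_vif(vif))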
[ 1149.342689] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f06ef9c6-88a7-447e-8710-408691314681 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.236s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.348553] env[69982]: INFO nova.scheduler.client.report [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted allocation for migration 8f21cc6b-0f54-4ba0-b9f2-3dc19533f868 [ 1149.350108] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73f26f4c-dea2-49f5-8790-13918d061a37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.365255] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc4f8d1-342e-4e75-9f1e-e57cc059efb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.400329] env[69982]: DEBUG nova.compute.manager [req-3076c9d6-2730-4397-9fb0-d1fe5ffb8109 req-b6812fbd-9bd0-4a6d-91e8-638c635e5ff7 service nova] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Detach interface failed, port_id=84b43cbd-49c3-4deb-aac7-06329e863173, reason: Instance cf08cf32-f3d4-494f-a51b-a40616e76429 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1149.448932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.448932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.448932] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1149.465463] env[69982]: INFO nova.compute.manager [-] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Took 2.19 seconds to deallocate network for instance. 
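Editor's note: the spawn path for instance 4fd5ea57-dc28-4d56-abbc-53a3c71394bf above resolves the image in the per-datastore cache folder ("[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"), copies it into a per-instance folder ("[datastore1] <instance-uuid>/<instance-uuid>.vmdk") and, further down, extends the root disk to the flavor size. A small sketch of how those datastore paths are composed; cache_image_path and instance_disk_path are illustrative helpers, not Nova's ds_util API, and the KiB interpretation of the extend size is an assumption.

    def cache_image_path(datastore, image_id):
        """Path of the cached base image on the datastore."""
        return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)

    def instance_disk_path(datastore, instance_uuid):
        """Path of the instance's root disk after the copy."""
        return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

    image_id = "a4e69d6f-1c15-4f57-92a8-5e81c6be8172"
    instance = "4fd5ea57-dc28-4d56-abbc-53a3c71394bf"
    print("copy %s -> %s" % (cache_image_path("datastore1", image_id),
                             instance_disk_path("datastore1", instance)))

    # The m1.nano flavor has root_gb=1; the "Extending root virtual disk to 1048576"
    # record below matches 1 GiB expressed in KiB (assumed unit).
    print("extend to %d" % (1 * 1024 * 1024))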
[ 1149.466377] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68f63c3-fdc4-41dd-9c1c-dc260bf7750b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.480200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Suspending the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1149.480200] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e3174602-5b15-442e-bd0a-0ae3a4256f8a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.484395] env[69982]: DEBUG oslo_vmware.api [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] Waiting for the task: (returnval){ [ 1149.484395] env[69982]: value = "task-3865412" [ 1149.484395] env[69982]: _type = "Task" [ 1149.484395] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.496645] env[69982]: DEBUG oslo_vmware.api [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] Task: {'id': task-3865412, 'name': SuspendVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.664684] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865411, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.860195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c683bc5-21e6-4075-be75-da152473268c tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 16.638s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.978907] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.996529] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1150.002162] env[69982]: DEBUG oslo_vmware.api [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] Task: {'id': task-3865412, 'name': SuspendVM_Task} progress is 58%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.098886] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d229dc-824e-46ee-9ff2-4ae39ee6a8bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.107381] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96db957c-953a-4ca8-acc5-5678dfcf6ef0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.145839] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aebaff2c-f601-4c8d-b093-0b3b8e7e1163 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.159319] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865411, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.163101] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519d9030-1abc-4f69-a4fc-9a70834a95f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.180346] env[69982]: DEBUG nova.compute.provider_tree [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.198778] env[69982]: DEBUG nova.network.neutron [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Updating instance_info_cache with network_info: [{"id": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "address": "fa:16:3e:a4:9c:21", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap299c46e3-a9", 
"ovs_interfaceid": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.495054] env[69982]: DEBUG oslo_vmware.api [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] Task: {'id': task-3865412, 'name': SuspendVM_Task, 'duration_secs': 0.728051} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.495368] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Suspended the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1150.496506] env[69982]: DEBUG nova.compute.manager [None req-0ad6639b-5786-43ca-b530-1e883f00e0de tempest-ServersAdminNegativeTestJSON-1801806097 tempest-ServersAdminNegativeTestJSON-1801806097-project-admin] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.496506] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb10f77-e312-4858-a493-473449f358cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.660434] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865411, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.72806} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.660738] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1150.661081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1150.661290] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a2e3c2e-5db5-477c-995e-5e1ef9f50787 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.668544] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1150.668544] env[69982]: value = "task-3865413" [ 1150.668544] env[69982]: _type = "Task" [ 1150.668544] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.679904] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865413, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.689620] env[69982]: DEBUG nova.scheduler.client.report [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.701342] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.701642] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Instance network_info: |[{"id": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "address": "fa:16:3e:a4:9c:21", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap299c46e3-a9", "ovs_interfaceid": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1150.703530] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:9c:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '299c46e3-a90a-4290-8d1e-2eb3e4d336db', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.710265] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1150.711089] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1150.711323] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20efffc4-e714-4629-81b9-36fc3920df70 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.733117] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.733117] env[69982]: value = "task-3865414" [ 1150.733117] env[69982]: _type = "Task" [ 1150.733117] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.742519] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865414, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.180306] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865413, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093314} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.180710] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1151.181666] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a93ca-e6a1-447c-af9b-f0a5c78381c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.196934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.197461] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1151.209226] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1151.210078] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.764s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.210402] env[69982]: DEBUG nova.objects.instance [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lazy-loading 'resources' on Instance uuid fea9d096-ee82-4ad1-a799-ef7aaf5026a2 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1151.211684] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1922a15-66f1-4eb0-8e38-77d902d1c27b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.233302] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1151.233302] env[69982]: value = "task-3865415" [ 1151.233302] env[69982]: _type = "Task" [ 1151.233302] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.247076] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865415, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.250750] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865414, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.424883] env[69982]: DEBUG nova.compute.manager [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Received event network-changed-0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.424883] env[69982]: DEBUG nova.compute.manager [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Refreshing instance network info cache due to event network-changed-0d321236-4177-49d0-afe0-f203e679fb0a. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.424883] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.424883] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.424883] env[69982]: DEBUG nova.network.neutron [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Refreshing network info cache for port 0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1151.577916] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.578259] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.712484] env[69982]: DEBUG nova.compute.utils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1151.714092] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1151.714276] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1151.747362] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865415, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.753775] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865414, 'name': CreateVM_Task, 'duration_secs': 0.539193} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.755464] env[69982]: DEBUG nova.policy [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c84472005ef43d99658fa6f5cf59bc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '07f7b975ecb449a290e2ae6582e07016', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1151.757078] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1151.757801] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.757995] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.758347] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1151.758906] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-213e344d-c787-4a2e-965e-273cfb0d91aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.764822] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1151.764822] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5225f0a6-ff23-dd53-12a3-6f88d2a1689b" [ 1151.764822] env[69982]: _type = "Task" [ 1151.764822] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.776797] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5225f0a6-ff23-dd53-12a3-6f88d2a1689b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.913282] env[69982]: DEBUG nova.compute.manager [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Received event network-changed-299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1151.913484] env[69982]: DEBUG nova.compute.manager [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Refreshing instance network info cache due to event network-changed-299c46e3-a90a-4290-8d1e-2eb3e4d336db. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1151.913741] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] Acquiring lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.913837] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] Acquired lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.914027] env[69982]: DEBUG nova.network.neutron [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Refreshing network info cache for port 299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1151.990980] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428b296c-9727-44ec-a0bc-503d56a6bbf9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.999377] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f60c518-1de2-446e-a2ea-7e969a602d38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.032172] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b792115-a450-4367-9cc7-6d621bc642b8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.040388] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa7f775-1952-455a-903e-a18635734e8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.053949] env[69982]: DEBUG nova.compute.provider_tree [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 
tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.060587] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Successfully created port: ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1152.082919] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1152.217937] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1152.231957] env[69982]: DEBUG nova.network.neutron [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updated VIF entry in instance network info cache for port 0d321236-4177-49d0-afe0-f203e679fb0a. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1152.231957] env[69982]: DEBUG nova.network.neutron [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.248861] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': 
task-3865415, 'name': ReconfigVM_Task, 'duration_secs': 0.608874} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.249189] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1152.249876] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba54e419-a12f-49d2-a4fc-2802e7c7ced9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.259735] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1152.259735] env[69982]: value = "task-3865416" [ 1152.259735] env[69982]: _type = "Task" [ 1152.259735] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.277896] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865416, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.288245] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5225f0a6-ff23-dd53-12a3-6f88d2a1689b, 'name': SearchDatastore_Task, 'duration_secs': 0.020897} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.288568] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.288807] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1152.289058] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.289211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.289399] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1152.289941] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0be46e9-82a4-4084-9902-07b4c67e3b2e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.301070] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1152.301250] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1152.302187] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e39c5eae-3b72-4fa6-8cab-06039c3ce7a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.308713] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1152.308713] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bdd3cb-a820-81e0-3759-1c28e0e8d02c" [ 1152.308713] env[69982]: _type = "Task" [ 1152.308713] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.321534] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.321755] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.321958] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.322235] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.322457] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.324282] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bdd3cb-a820-81e0-3759-1c28e0e8d02c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.325453] env[69982]: INFO nova.compute.manager [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Terminating instance [ 1152.556684] env[69982]: DEBUG nova.scheduler.client.report [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.609482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.662349] env[69982]: DEBUG nova.network.neutron [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Updated VIF entry in instance network info cache for port 299c46e3-a90a-4290-8d1e-2eb3e4d336db. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1152.662737] env[69982]: DEBUG nova.network.neutron [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Updating instance_info_cache with network_info: [{"id": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "address": "fa:16:3e:a4:9c:21", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap299c46e3-a9", "ovs_interfaceid": "299c46e3-a90a-4290-8d1e-2eb3e4d336db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.723456] env[69982]: INFO nova.virt.block_device [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Booting with volume d6ce4976-1bf1-441f-8fbb-35dab72ed085 at /dev/sda [ 1152.737809] env[69982]: DEBUG oslo_concurrency.lockutils [req-9a459c00-d886-43b9-be86-2a87bb35dd6a req-6ccd3dff-bd8b-4e17-a253-1465a3af9e81 service nova] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.760202] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-358e6f65-3975-42b7-b7f0-9c882ed64b1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.772180] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865416, 'name': Rename_Task, 'duration_secs': 0.149377} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.773436] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1152.773623] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-090090b0-4c74-4593-921e-22dcb8f409b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.777752] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3b16cc-207b-4c44-a303-7f5118268be6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.793475] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1152.793475] env[69982]: value = "task-3865417" [ 1152.793475] env[69982]: _type = "Task" [ 1152.793475] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.811744] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e33ca5f3-bef0-458a-b858-4c3c1aeccded {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.814282] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865417, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.826710] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bdd3cb-a820-81e0-3759-1c28e0e8d02c, 'name': SearchDatastore_Task, 'duration_secs': 0.011529} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.830259] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e529ab64-d019-40f3-9027-166a73479271 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.841482] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-558c8fd5-725e-413b-8694-85ebe4767d5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.843537] env[69982]: DEBUG nova.compute.manager [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1152.843740] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1152.844545] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a755b5a-54e6-4e08-92a5-463d5b04833e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.852666] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1152.852666] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52439e3b-629e-9f12-5e16-bf6c0c19812d" [ 1152.852666] env[69982]: _type = "Task" [ 1152.852666] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.855334] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1152.858849] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d234d2d7-3566-4d2a-9ae1-fdadd0ca7ba5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.878324] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beed8367-68c5-43bd-a0c3-e312dc51f36d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.881270] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52439e3b-629e-9f12-5e16-bf6c0c19812d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.881602] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1152.881602] env[69982]: value = "task-3865418" [ 1152.881602] env[69982]: _type = "Task" [ 1152.881602] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.888638] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ee4de4-561e-46e0-b537-95ba622166c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.894794] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865418, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.908749] env[69982]: DEBUG nova.virt.block_device [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating existing volume attachment record: c689bc0a-aeb8-4ffd-afb2-4db324c07bab {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1153.062195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.065182] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.034s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.065427] env[69982]: DEBUG nova.objects.instance [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lazy-loading 'resources' on Instance uuid 28518353-0bff-460f-8384-f0376280917d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.085369] env[69982]: INFO nova.scheduler.client.report [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Deleted allocations for instance fea9d096-ee82-4ad1-a799-ef7aaf5026a2 [ 1153.166127] env[69982]: DEBUG oslo_concurrency.lockutils [req-fd961ddf-f87f-41cf-99a8-57274aad877f req-2d5f6c7d-6cd3-4e39-a8a1-b8af80ac6920 service nova] Releasing lock "refresh_cache-e5f7aebd-22a8-47fd-9b73-09791aecae56" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.304052] env[69982]: DEBUG oslo_vmware.api [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865417, 'name': PowerOnVM_Task, 'duration_secs': 0.480133} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.304281] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1153.304437] env[69982]: INFO nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Took 7.40 seconds to spawn the instance on the hypervisor. 
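The spawn sequence recorded above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows oslo.vmware's invoke-then-poll pattern: each "Invoking ..." line corresponds to an invoke_api() call and each "Task: {...} progress is N%" line comes from wait_for_task() polling the returned task until it finishes. A minimal sketch of that pattern, assuming oslo.vmware is installed and a vCenter is reachable; the host, credentials and MoRef id below are placeholders, not values taken from this log:

    # Sketch of the invoke-then-poll pattern behind the
    # "Invoking VirtualMachine.PowerOnVM_Task" / "progress is N%" /
    # "completed successfully" lines above. Host, credentials and the
    # MoRef id are placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Connects and logs in to vCenter on construction.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # A VirtualMachine managed object reference; 'vm-1234' is illustrative.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')

    # invoke_api() issues the SOAP call ("Invoking ..."); wait_for_task()
    # polls the task's progress and returns once it completes successfully,
    # raising if the task reports an error instead.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)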
[ 1153.304626] env[69982]: DEBUG nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1153.305469] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e97bb7-7a9c-46e7-807a-2fd8018cb09f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.366121] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52439e3b-629e-9f12-5e16-bf6c0c19812d, 'name': SearchDatastore_Task, 'duration_secs': 0.013885} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.366400] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.366670] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e5f7aebd-22a8-47fd-9b73-09791aecae56/e5f7aebd-22a8-47fd-9b73-09791aecae56.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1153.366939] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-beb53133-0671-472b-9c69-5620afec3f77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.373994] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1153.373994] env[69982]: value = "task-3865419" [ 1153.373994] env[69982]: _type = "Task" [ 1153.373994] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.383176] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.392614] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865418, 'name': PowerOffVM_Task, 'duration_secs': 0.176022} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.392922] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1153.393201] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1153.393484] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-428673cf-7b02-476f-b34f-0b87ca7e3480 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.456396] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1153.456675] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1153.456882] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleting the datastore file [datastore1] 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1153.457273] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b3891d7-3db2-415c-86a7-c39b9cfd4147 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.467971] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1153.467971] env[69982]: value = "task-3865421" [ 1153.467971] env[69982]: _type = "Task" [ 1153.467971] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.482514] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.587811] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Successfully updated port: ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1153.594181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c362e3a1-0346-4367-835d-237225d5f6b1 tempest-ServerDiskConfigTestJSON-1623943188 tempest-ServerDiskConfigTestJSON-1623943188-project-member] Lock "fea9d096-ee82-4ad1-a799-ef7aaf5026a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.589s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.826286] env[69982]: INFO nova.compute.manager [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Took 23.68 seconds to build instance. [ 1153.852502] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f29d8453-8178-49bd-b72e-d8338d7d2a5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.862179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd69aab-be21-4d51-9af1-5ca23b30963c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.902016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33831320-e3e7-463e-96dc-d6bb9feaafbf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.910732] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865419, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.914331] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ddc2635-7138-4379-a773-58ab165d6d6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.930431] env[69982]: DEBUG nova.compute.provider_tree [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.979915] env[69982]: DEBUG oslo_vmware.api [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242634} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.980157] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1153.980346] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1153.980524] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1153.980698] env[69982]: INFO nova.compute.manager [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1153.980943] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.981268] env[69982]: DEBUG nova.compute.manager [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1153.981381] env[69982]: DEBUG nova.network.neutron [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1154.091427] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.091427] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.091427] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1154.239507] env[69982]: DEBUG nova.compute.manager [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Received event network-vif-plugged-ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.239722] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.240279] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.241040] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.241040] env[69982]: DEBUG nova.compute.manager [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] No waiting events found dispatching 
network-vif-plugged-ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1154.241040] env[69982]: WARNING nova.compute.manager [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Received unexpected event network-vif-plugged-ed266e98-9354-46dd-a173-6c25f605f719 for instance with vm_state building and task_state block_device_mapping. [ 1154.241575] env[69982]: DEBUG nova.compute.manager [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Received event network-changed-ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1154.241575] env[69982]: DEBUG nova.compute.manager [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Refreshing instance network info cache due to event network-changed-ed266e98-9354-46dd-a173-6c25f605f719. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1154.241736] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.328935] env[69982]: DEBUG oslo_concurrency.lockutils [None req-50edc449-be05-4ad8-a8f2-b04614fff37d tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.200s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.408708] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865419, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607165} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.408708] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e5f7aebd-22a8-47fd-9b73-09791aecae56/e5f7aebd-22a8-47fd-9b73-09791aecae56.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1154.410086] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1154.410086] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d169c017-dd6f-4273-900d-11adb0ee1502 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.416355] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1154.416355] env[69982]: value = "task-3865422" [ 1154.416355] env[69982]: _type = "Task" [ 1154.416355] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.426200] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865422, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.434419] env[69982]: DEBUG nova.scheduler.client.report [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.631188] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1154.774716] env[69982]: DEBUG nova.network.neutron [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.927201] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865422, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231939} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.927572] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1154.928395] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de1e3c8-bb43-4174-86ab-876d24a987ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.947317] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.882s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.965245] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] e5f7aebd-22a8-47fd-9b73-09791aecae56/e5f7aebd-22a8-47fd-9b73-09791aecae56.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.966182] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.164s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.968531] env[69982]: INFO nova.compute.claims [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.972171] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98485616-5f6d-471b-818f-02ac812d68f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.988050] env[69982]: DEBUG nova.network.neutron [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.990095] env[69982]: INFO nova.scheduler.client.report [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Deleted allocations for instance 28518353-0bff-460f-8384-f0376280917d [ 1154.996750] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1154.996750] env[69982]: value = "task-3865423" [ 1154.996750] env[69982]: _type = "Task" [ 1154.996750] env[69982]: } to 
complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.006707] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.008240] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1155.008719] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1155.008956] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.009137] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1155.009456] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.009526] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1155.009649] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1155.009864] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1155.010382] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1155.010382] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1155.010382] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1155.010755] env[69982]: DEBUG nova.virt.hardware [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1155.011693] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef302ca-9798-4f6e-8d62-8f00a3e74ce4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.023456] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc571a09-9cb4-44fd-ac30-b067f08b3268 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.074559] env[69982]: INFO nova.compute.manager [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Rebuilding instance [ 1155.133804] env[69982]: DEBUG nova.compute.manager [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.134686] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018d4e06-1a70-4e17-a2ff-f3688620641e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.278024] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.278441] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Instance network_info: |[{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1155.278784] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.278964] env[69982]: DEBUG nova.network.neutron [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Refreshing network info cache for port ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1155.281587] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:e1:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed266e98-9354-46dd-a173-6c25f605f719', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1155.295283] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1155.297950] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1155.297950] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bab6e7a-804f-44ac-9896-4fe614c539d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.328720] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1155.328720] env[69982]: value = "task-3865424" [ 1155.328720] env[69982]: _type = "Task" [ 1155.328720] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.341066] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865424, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.492388] env[69982]: INFO nova.compute.manager [-] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Took 1.51 seconds to deallocate network for instance. [ 1155.505112] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17f6494a-3ea7-4117-92ec-3f5578f2c59d tempest-ServersAaction247Test-1682944898 tempest-ServersAaction247Test-1682944898-project-member] Lock "28518353-0bff-460f-8384-f0376280917d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.797s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.515069] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865423, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.842055] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865424, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.002922] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.015360] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865423, 'name': ReconfigVM_Task, 'duration_secs': 0.695524} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.015582] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Reconfigured VM instance instance-0000006a to attach disk [datastore2] e5f7aebd-22a8-47fd-9b73-09791aecae56/e5f7aebd-22a8-47fd-9b73-09791aecae56.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1156.016330] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5c25f42c-c582-4655-be8d-6c2e78e35999 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.027845] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1156.027845] env[69982]: value = "task-3865425" [ 1156.027845] env[69982]: _type = "Task" [ 1156.027845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.039504] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865425, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.065463] env[69982]: DEBUG nova.network.neutron [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updated VIF entry in instance network info cache for port ed266e98-9354-46dd-a173-6c25f605f719. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1156.065894] env[69982]: DEBUG nova.network.neutron [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.149746] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1156.150260] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba9fdcf5-acae-47b5-91d8-c0d8ff714313 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.158409] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1156.158409] env[69982]: value = "task-3865426" [ 1156.158409] env[69982]: _type = "Task" [ 1156.158409] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.168199] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.255296] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bec8cee6-bdc5-40d5-bd99-3982c7d6e8c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.264902] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a06fc11-ace5-40e6-9414-a4f9cbc6fab8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.297258] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698a3879-0743-46c0-8f03-080d23264e62 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.305171] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e2a8a5-2f59-4f95-8320-bab12c77601f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.319449] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.319837] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.320524] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.320524] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.320759] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.327021] env[69982]: DEBUG nova.compute.provider_tree [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.327021] env[69982]: INFO nova.compute.manager [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Terminating instance [ 1156.340050] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865424, 'name': CreateVM_Task, 'duration_secs': 0.516729} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.340230] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1156.340900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768074', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'name': 'volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9815a4f9-3827-4e83-b897-18edadcac55b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'serial': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085'}, 'guest_format': None, 'attachment_id': 'c689bc0a-aeb8-4ffd-afb2-4db324c07bab', 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69982) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1156.341216] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Root volume attach. 
Driver type: vmdk {{(pid=69982) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1156.342161] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150e6684-68fb-4241-a0f5-ce0231f8b452 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.350975] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56552a55-cad0-41b2-903a-2cc68c3438f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.361531] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759fa15c-8e1d-451a-b176-2881ab70c074 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.369881] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-1bd257e6-d297-4b50-baae-ccecc28a4086 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.378244] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1156.378244] env[69982]: value = "task-3865427" [ 1156.378244] env[69982]: _type = "Task" [ 1156.378244] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.387148] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.421843] env[69982]: DEBUG nova.compute.manager [req-bca2dcbc-eb13-408c-98fd-7e897befefc3 req-48e1898b-0957-417b-8157-2b1e43f6d2cf service nova] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Received event network-vif-deleted-c816f035-7c9d-47ba-8b3b-29a57ec10561 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1156.538191] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865425, 'name': Rename_Task, 'duration_secs': 0.224773} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.538488] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1156.538741] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10aca9e5-a718-41b6-ac04-ce485e86d133 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.545388] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1156.545388] env[69982]: value = "task-3865428" [ 1156.545388] env[69982]: _type = "Task" [ 1156.545388] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.554353] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865428, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.569495] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8659d17-7d64-450d-9ba2-b091390c7c2c req-8e7929aa-5a78-4513-83a3-c0080e0740d8 service nova] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.668906] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865426, 'name': PowerOffVM_Task, 'duration_secs': 0.145892} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.669204] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1156.669439] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.670318] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f1d99c-96fb-46aa-aa3c-dca3bd8a56b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.678369] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.678641] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53e2012d-a080-4fdd-b685-cca5f3980ad0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.706208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.706446] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.706634] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Deleting the datastore file [datastore1] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.706918] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a1027b2-10f6-4761-b270-7835680d7549 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.714036] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1156.714036] env[69982]: value = "task-3865430" [ 1156.714036] env[69982]: _type = "Task" [ 1156.714036] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.723372] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.828722] env[69982]: DEBUG nova.scheduler.client.report [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1156.835592] env[69982]: DEBUG nova.compute.manager [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1156.835592] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1156.835592] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38892b28-4de5-4730-b0c7-0efd2e9c5ba0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.846848] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1156.847243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83827dc9-b053-43cc-825c-2b33560a187b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.893697] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 35%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.990938] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.991915] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.991915] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleting the datastore file [datastore2] 1315a51d-6d0f-4e6c-9ae1-6af96b74104f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.993442] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b375a06f-221b-499d-9196-f0ba81705366 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.004701] env[69982]: DEBUG oslo_vmware.api [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1157.004701] env[69982]: value = "task-3865432" [ 1157.004701] env[69982]: _type = "Task" [ 1157.004701] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.020813] env[69982]: DEBUG oslo_vmware.api [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.063088] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865428, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.228438] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865430, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.338023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.338023] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1157.340297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.379s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.396526] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 49%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.520592] env[69982]: DEBUG oslo_vmware.api [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.372061} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.520927] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1157.521164] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1157.521564] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1157.521564] env[69982]: INFO nova.compute.manager [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Took 0.69 seconds to destroy the instance on the hypervisor. [ 1157.521821] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.522037] env[69982]: DEBUG nova.compute.manager [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1157.522138] env[69982]: DEBUG nova.network.neutron [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1157.561564] env[69982]: DEBUG oslo_vmware.api [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865428, 'name': PowerOnVM_Task, 'duration_secs': 0.728513} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.561757] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.561966] env[69982]: INFO nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Took 9.22 seconds to spawn the instance on the hypervisor. 
[ 1157.562172] env[69982]: DEBUG nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.563573] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cd5554-246a-4ef9-af2d-e8cb57a85787 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.729885] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865430, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.851064] env[69982]: DEBUG nova.compute.utils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1157.851064] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1157.851064] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1157.896073] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 62%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.953025] env[69982]: DEBUG nova.policy [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '361bff09a25e4b5ab6a071a458858131', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afaf89b0250d46048813da25c754e1a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.094926] env[69982]: INFO nova.compute.manager [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Took 22.57 seconds to build instance. 
[ 1158.181061] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f9588b-bf40-46f9-89f2-a6748aff034c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.198086] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63f8cf6-dde4-441b-a751-cf28984047f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.251665] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39edf227-bb54-4a34-84c8-8e12172392be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.266677] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69e1af3-f391-4b55-9f57-25a08962af19 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.271669] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865430, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.118044} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.271863] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1158.271991] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1158.272269] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1158.291801] env[69982]: DEBUG nova.compute.provider_tree [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1158.354458] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1158.396197] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 76%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.600367] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ea7f46e6-1d81-46d2-91d0-14af7246d23b tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.086s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.755290] env[69982]: DEBUG nova.network.neutron [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.795968] env[69982]: DEBUG nova.scheduler.client.report [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.832682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.833017] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.833326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.833530] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.833745] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a 
tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.836027] env[69982]: INFO nova.compute.manager [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Terminating instance [ 1158.888406] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Successfully created port: 4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1158.899648] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.903953] env[69982]: DEBUG nova.compute.manager [req-a33a5e00-a127-4932-8ae1-77ca4472b0e1 req-b2049114-5d5a-44bf-ae91-0f807723202c service nova] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Received event network-vif-deleted-26a5b972-58d8-44a6-abfb-c79dd1301e05 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.259203] env[69982]: INFO nova.compute.manager [-] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Took 1.74 seconds to deallocate network for instance. 
[ 1159.315629] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.315927] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.316103] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.316329] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.316498] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.316655] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.316895] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.317059] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1159.317435] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Got 1 
possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.317435] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.317577] env[69982]: DEBUG nova.virt.hardware [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.319051] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a7bebd-a644-4fda-bc09-3f65ad9e3f35 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.331540] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e36774-50dd-451d-8668-75385bf7001b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.343375] env[69982]: DEBUG nova.compute.manager [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1159.343609] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1159.354148] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd7896ac-e689-443f-b61a-5830c804b734 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.358052] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1159.364797] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1159.364932] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1159.365994] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-849892c0-c2fa-44c4-a0ee-a3b0219fd146 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.382738] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1159.385182] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1159.386735] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-013ce276-98a3-4d8b-9204-cb2476412fcf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.393588] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1159.393588] env[69982]: value = "task-3865433" [ 1159.393588] env[69982]: _type = "Task" [ 1159.393588] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.406080] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 97%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.406080] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1159.406080] env[69982]: value = "task-3865434" [ 1159.406080] env[69982]: _type = "Task" [ 1159.406080] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.414550] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865433, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.420316] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1159.420613] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1159.420747] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1159.421056] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1159.421272] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1159.421518] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1159.421966] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1159.421966] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
1159.422163] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1159.422368] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1159.422550] env[69982]: DEBUG nova.virt.hardware [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1159.422983] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865434, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.424332] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510370ad-4d03-42ff-b0ee-b334a1fd63f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.433764] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba8bce5-e1a6-40db-a811-72e98dbfbd45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.767664] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.810545] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.470s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.813792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.835s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.813792] env[69982]: DEBUG nova.objects.instance [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'resources' on Instance uuid cf08cf32-f3d4-494f-a51b-a40616e76429 {{(pid=69982) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1159.901920] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 97%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.920519] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865434, 'name': PowerOffVM_Task, 'duration_secs': 0.23647} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.924227] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.924534] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.925265] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865433, 'name': CreateVM_Task, 'duration_secs': 0.338469} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.925500] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4e41e62d-e1c7-4795-ad45-e05b23484165 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.927377] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1159.927851] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.928096] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.928481] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1159.929263] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-629eef7e-a71d-4851-be75-15ff3a5c4946 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.937353] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1159.937353] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b72d18-c556-53dd-ddc4-4f08ba29ff17" [ 1159.937353] env[69982]: _type = "Task" [ 1159.937353] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.948353] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b72d18-c556-53dd-ddc4-4f08ba29ff17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.999654] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.999654] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.999654] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleting the datastore file [datastore1] ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.999654] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d72e1408-6607-4ef2-af09-73102df59ae1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.006201] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for the task: (returnval){ [ 1160.006201] env[69982]: value = "task-3865436" [ 1160.006201] env[69982]: _type = "Task" [ 1160.006201] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.023446] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865436, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.394461] env[69982]: INFO nova.scheduler.client.report [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocation for migration b5bad333-1770-474d-ba7d-7b8924a8217b [ 1160.401293] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task} progress is 98%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.456103] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52b72d18-c556-53dd-ddc4-4f08ba29ff17, 'name': SearchDatastore_Task, 'duration_secs': 0.013542} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.456940] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.457213] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1160.457445] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1160.457589] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1160.457767] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1160.461774] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-850cb7de-8d18-4858-b710-376810d10420 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.477349] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1160.477558] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1160.478346] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb69f03b-0ddd-4242-8e11-d4af9ab3d841 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.486986] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1160.486986] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529b1d65-f97e-031b-02b1-f37478ea23f8" [ 1160.486986] env[69982]: _type = "Task" [ 1160.486986] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.496760] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529b1d65-f97e-031b-02b1-f37478ea23f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.522554] env[69982]: DEBUG oslo_vmware.api [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Task: {'id': task-3865436, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268055} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.522927] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.523414] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.523673] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.523857] env[69982]: INFO nova.compute.manager [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1160.524270] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.524696] env[69982]: DEBUG nova.compute.manager [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1160.524781] env[69982]: DEBUG nova.network.neutron [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1160.669019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297a4f65-f37e-4927-828d-2cafd626c32c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.679270] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55b5116-b49c-43ed-8183-5cc8e07f31da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.717335] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0578ec-4f41-4877-a5a4-f6b5984c880a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.726014] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a0e8e7-a4da-4590-8412-3a32a772b679 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.744016] env[69982]: DEBUG nova.compute.provider_tree [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1160.906568] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.485s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.906568] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865427, 'name': RelocateVM_Task, 'duration_secs': 4.155025} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.906568] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1160.906568] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768074', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'name': 'volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9815a4f9-3827-4e83-b897-18edadcac55b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'serial': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1160.911264] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b535b973-0537-4f57-97aa-e21129da5d0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.926304] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77ee22b-ad4e-40ac-9ae0-ed0dac174475 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.969830] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085/volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.970983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.971977] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.972254] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.972465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.972657] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.974692] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21f5a52a-7943-45be-b20c-b915b9b31a10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.996027] env[69982]: INFO nova.compute.manager [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Terminating instance [ 1161.008145] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]529b1d65-f97e-031b-02b1-f37478ea23f8, 'name': SearchDatastore_Task, 'duration_secs': 0.026591} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.011959] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1161.011959] env[69982]: value = "task-3865437" [ 1161.011959] env[69982]: _type = "Task" [ 1161.011959] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.011959] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af10885-e8c4-452a-ab02-957c0ec08487 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.026482] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.028074] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1161.028074] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9df1c-28fe-1a4a-dab8-63899214c2e7" [ 1161.028074] env[69982]: _type = "Task" [ 1161.028074] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.039534] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9df1c-28fe-1a4a-dab8-63899214c2e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.069201] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Successfully updated port: 4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1161.176011] env[69982]: DEBUG nova.compute.manager [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Received event network-vif-plugged-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.176612] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.176840] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] Lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.177025] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] Lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.177202] env[69982]: DEBUG nova.compute.manager [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] No waiting events found dispatching network-vif-plugged-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.177370] env[69982]: WARNING nova.compute.manager [req-0fbd042d-1d1e-4a10-b49c-9209856d4bd8 req-351285d2-42a9-4411-a7d1-357bbd2918f5 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Received unexpected event network-vif-plugged-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 for instance with vm_state building and task_state spawning. 
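The records above follow the oslo.vmware task pattern that recurs throughout this trace: each vCenter operation (ReconfigVM_Task, SearchDatastore_Task, PowerOffVM_Task, ...) returns a task handle, and the wait_for_task/_poll_task entries re-read its state until it reaches a terminal one, producing the intermediate "progress is N%." lines seen here. The sketch below is a minimal, self-contained illustration of that poll-until-complete shape; it is not the oslo.vmware implementation, and fetch_task_state plus the toy state names are assumptions made only for the example.

    # Illustrative sketch only: mirrors the poll-until-complete shape seen in the
    # wait_for_task/_poll_task records above. It is NOT the oslo.vmware code;
    # fetch_task_state() and the state names are assumptions for this example.
    import time

    TERMINAL_SUCCESS = "success"
    TERMINAL_ERROR = "error"

    def poll_task(fetch_task_state, interval=0.5, timeout=300.0):
        """Re-read the task state every `interval` seconds until it finishes."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_task_state()   # e.g. ("running", 14)
            if state == TERMINAL_SUCCESS:
                return True
            if state == TERMINAL_ERROR:
                raise RuntimeError("task failed")
            # In the log this corresponds to the "... progress is 14%." DEBUG lines.
            print(f"progress is {progress}%")
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")

    if __name__ == "__main__":
        # Toy task that "completes" on the third poll.
        calls = iter([("running", 0), ("running", 51), ("success", 100)])
        poll_task(lambda: next(calls), interval=0.01)
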
[ 1161.193352] env[69982]: DEBUG nova.compute.manager [req-2607cd8d-5d53-4e48-94da-12221abb3ba2 req-baaebd59-0e89-4f6f-bff5-cf5879b50ef2 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Received event network-vif-deleted-1ded08ab-b715-4b57-81f3-69d6383c5a74 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.193462] env[69982]: INFO nova.compute.manager [req-2607cd8d-5d53-4e48-94da-12221abb3ba2 req-baaebd59-0e89-4f6f-bff5-cf5879b50ef2 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Neutron deleted interface 1ded08ab-b715-4b57-81f3-69d6383c5a74; detaching it from the instance and deleting it from the info cache [ 1161.193603] env[69982]: DEBUG nova.network.neutron [req-2607cd8d-5d53-4e48-94da-12221abb3ba2 req-baaebd59-0e89-4f6f-bff5-cf5879b50ef2 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.250917] env[69982]: DEBUG nova.scheduler.client.report [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1161.313320] env[69982]: DEBUG nova.network.neutron [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.502710] env[69982]: DEBUG nova.compute.manager [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.502710] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.502710] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939f134c-9a8f-4352-bb53-4d34d074a02b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.517084] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.521658] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06e3ca45-003d-4928-a38f-abff29e3edb7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.534155] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865437, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.540728] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1161.540728] env[69982]: value = "task-3865438" [ 1161.540728] env[69982]: _type = "Task" [ 1161.540728] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.551492] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c9df1c-28fe-1a4a-dab8-63899214c2e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015824} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.552358] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1161.552868] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1161.553273] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8a81183-56c4-4c35-8d8b-03a97533eebe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.561798] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865438, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.568742] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1161.568742] env[69982]: value = "task-3865439" [ 1161.568742] env[69982]: _type = "Task" [ 1161.568742] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.581805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.582060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.582224] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1161.584139] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865439, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.696438] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d9b99eed-1d08-4915-9101-2a8340a4e1de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.710981] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a303b51-2999-4c4a-89e6-ad105561d6ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.753101] env[69982]: DEBUG nova.compute.manager [req-2607cd8d-5d53-4e48-94da-12221abb3ba2 req-baaebd59-0e89-4f6f-bff5-cf5879b50ef2 service nova] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Detach interface failed, port_id=1ded08ab-b715-4b57-81f3-69d6383c5a74, reason: Instance ad43c35a-69bc-4c84-8869-cfde6f516b9b could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.756622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.760191] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.151s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.765053] env[69982]: INFO nova.compute.claims [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.793821] env[69982]: INFO nova.scheduler.client.report [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted allocations for instance cf08cf32-f3d4-494f-a51b-a40616e76429 [ 1161.818650] env[69982]: INFO nova.compute.manager [-] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Took 1.29 seconds to deallocate network for instance. 
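The terminate path for instance e5f7aebd-22a8-47fd-9b73-09791aecae56, visible in the records above and below, runs its steps in a fixed order: power off the VM (PowerOffVM_Task), unregister it (UnregisterVM), delete its datastore contents (DeleteDatastoreFile_Task), then deallocate its Neutron ports. The following is a rough sketch of that ordering only, using stand-in step functions rather than the real nova.virt.vmwareapi.vmops / ds_util code.

    # Illustrative sketch of the teardown ordering shown in the surrounding
    # records (power off -> unregister -> delete datastore files -> deallocate
    # network). The step callables are stand-ins, not the real nova code.

    def destroy_instance(vm, steps):
        """Run the teardown steps in the order the log shows, stopping on error."""
        for name in ("power_off", "unregister", "delete_datastore_files",
                     "deallocate_network"):
            steps[name](vm)   # each step maps to one *_Task invocation in the log
            print(f"{name} done for {vm}")

    if __name__ == "__main__":
        def noop(vm):
            return None

        destroy_instance("e5f7aebd-22a8-47fd-9b73-09791aecae56",
                         {"power_off": noop,
                          "unregister": noop,
                          "delete_datastore_files": noop,
                          "deallocate_network": noop})
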
[ 1161.959572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.959883] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.960179] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "642b42eb-eeef-401c-8feb-032d783c645a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.960425] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.960618] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.962941] env[69982]: INFO nova.compute.manager [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Terminating instance [ 1162.031714] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865437, 'name': ReconfigVM_Task, 'duration_secs': 0.735344} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.032134] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085/volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1162.041166] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4e440d6-1af3-42f4-b6a4-0ecb7d797b99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.078796] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865438, 'name': PowerOffVM_Task, 'duration_secs': 0.23509} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.080408] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.080603] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.080927] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1162.080927] env[69982]: value = "task-3865440" [ 1162.080927] env[69982]: _type = "Task" [ 1162.080927] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.081547] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8be3b2e-3206-493d-869e-b3614e4a6ebe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.093547] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865439, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.103500] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865440, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.133738] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1162.184033] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.184647] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.184647] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore2] e5f7aebd-22a8-47fd-9b73-09791aecae56 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.184811] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99c4e6c9-9913-4c70-ae2c-0d2e60809697 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.192996] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1162.192996] env[69982]: value = "task-3865442" [ 1162.192996] env[69982]: _type = "Task" [ 1162.192996] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.203073] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865442, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.304796] env[69982]: DEBUG oslo_concurrency.lockutils [None req-23a401f9-db7f-4c15-a138-a3dfb5b942e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "cf08cf32-f3d4-494f-a51b-a40616e76429" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.190s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.326161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.352264] env[69982]: DEBUG nova.network.neutron [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating instance_info_cache with network_info: [{"id": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "address": "fa:16:3e:e5:7a:1a", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b426f54-9c", "ovs_interfaceid": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.469856] env[69982]: DEBUG nova.compute.manager [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1162.470078] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1162.471082] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5ebebc-06be-4ebd-b67b-33c31cf69d6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.479828] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1162.480113] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccbb3629-0c8d-4bf8-b883-b8b8176bd48b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.487759] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1162.487759] env[69982]: value = "task-3865443" [ 1162.487759] env[69982]: _type = "Task" [ 1162.487759] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.499450] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.580612] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865439, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640487} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.580918] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1162.581178] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1162.581598] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-122189ad-913d-4c9f-b485-b02a7861cd4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.593668] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865440, 'name': ReconfigVM_Task, 'duration_secs': 0.219395} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.595438] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768074', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'name': 'volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9815a4f9-3827-4e83-b897-18edadcac55b', 'attached_at': '', 'detached_at': '', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'serial': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1162.596183] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1162.596183] env[69982]: value = "task-3865444" [ 1162.596183] env[69982]: _type = "Task" [ 1162.596183] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.596474] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c83f6965-e734-455d-a079-f5650d3ed10f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.608743] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865444, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.610548] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1162.610548] env[69982]: value = "task-3865445" [ 1162.610548] env[69982]: _type = "Task" [ 1162.610548] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.620447] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865445, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.708421] env[69982]: DEBUG oslo_vmware.api [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.47499} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.708779] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.709083] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.709312] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.709496] env[69982]: INFO nova.compute.manager [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1162.709742] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.709953] env[69982]: DEBUG nova.compute.manager [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1162.710059] env[69982]: DEBUG nova.network.neutron [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1162.860177] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.860177] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Instance network_info: |[{"id": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "address": "fa:16:3e:e5:7a:1a", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b426f54-9c", "ovs_interfaceid": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1162.860177] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:7a:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b426f54-9cfa-4f17-ac93-6cc3529b9b86', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.870812] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.876372] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.877330] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-748a9638-e216-49ed-9050-9ee796816821 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.907349] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.907349] env[69982]: value = "task-3865446" [ 1162.907349] env[69982]: _type = "Task" [ 1162.907349] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.923720] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865446, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.002657] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865443, 'name': PowerOffVM_Task, 'duration_secs': 0.275963} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.002657] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.002657] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.002876] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fea5d417-9319-44b6-be08-12d702969ea9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.069478] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e45064-588d-44b4-b2c1-c29c377575c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.077594] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1163.077594] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1163.077594] 
env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleting the datastore file [datastore1] 642b42eb-eeef-401c-8feb-032d783c645a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1163.077594] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8b9dd29-3be4-4cb7-b4cd-4a70576b48db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.085125] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc7b073-1f02-4afa-9713-6c2126b00866 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.090245] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for the task: (returnval){ [ 1163.090245] env[69982]: value = "task-3865448" [ 1163.090245] env[69982]: _type = "Task" [ 1163.090245] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.133868] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8096bf7-4269-4892-ae88-19d2e53337e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.140487] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865448, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.148124] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865445, 'name': Rename_Task, 'duration_secs': 0.1535} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.151236] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1163.152055] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865444, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07515} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.152306] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff79016f-3828-4e5a-a058-1f83eebf4bb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.155488] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff28a6a3-db0b-46bf-afb4-4ce3f3f93401 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.159417] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1163.160249] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcd7c58-3dd0-49a3-9559-18a87dc44f2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.178275] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1163.178275] env[69982]: value = "task-3865449" [ 1163.178275] env[69982]: _type = "Task" [ 1163.178275] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.178810] env[69982]: DEBUG nova.compute.provider_tree [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.197178] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.201497] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6b01f36-f521-4b4c-ad7e-5fee90efd8c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.219340] env[69982]: DEBUG nova.compute.manager [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Received event network-changed-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1163.219525] env[69982]: DEBUG nova.compute.manager [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Refreshing instance network info cache due to event 
network-changed-4b426f54-9cfa-4f17-ac93-6cc3529b9b86. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1163.219735] env[69982]: DEBUG oslo_concurrency.lockutils [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] Acquiring lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.219872] env[69982]: DEBUG oslo_concurrency.lockutils [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] Acquired lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.220038] env[69982]: DEBUG nova.network.neutron [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Refreshing network info cache for port 4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1163.229839] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865449, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.234558] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1163.234558] env[69982]: value = "task-3865450" [ 1163.234558] env[69982]: _type = "Task" [ 1163.234558] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.234828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.235020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.247278] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.418207] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865446, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.601600] env[69982]: DEBUG oslo_vmware.api [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Task: {'id': task-3865448, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157802} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.602255] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.602255] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1163.602255] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1163.602255] env[69982]: INFO nova.compute.manager [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1163.602906] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1163.602906] env[69982]: DEBUG nova.compute.manager [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1163.602906] env[69982]: DEBUG nova.network.neutron [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1163.640401] env[69982]: DEBUG nova.network.neutron [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.698853] env[69982]: DEBUG nova.scheduler.client.report [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1163.702249] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865449, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.740435] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1163.747637] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865450, 'name': ReconfigVM_Task, 'duration_secs': 0.291446} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.748191] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf/4fd5ea57-dc28-4d56-abbc-53a3c71394bf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1163.749433] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4136e102-5b8a-4abb-b8ef-461c332f8d84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.758586] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1163.758586] env[69982]: value = "task-3865451" [ 1163.758586] env[69982]: _type = "Task" [ 1163.758586] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.770614] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865451, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.924197] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865446, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.078550] env[69982]: DEBUG nova.network.neutron [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updated VIF entry in instance network info cache for port 4b426f54-9cfa-4f17-ac93-6cc3529b9b86. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1164.079060] env[69982]: DEBUG nova.network.neutron [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating instance_info_cache with network_info: [{"id": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "address": "fa:16:3e:e5:7a:1a", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b426f54-9c", "ovs_interfaceid": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.143191] env[69982]: INFO nova.compute.manager [-] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Took 1.43 seconds to deallocate network for instance. [ 1164.200148] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865449, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.204730] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.205284] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1164.208724] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.206s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.208928] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.211623] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.444s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.211806] env[69982]: DEBUG nova.objects.instance [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lazy-loading 'resources' on Instance uuid 1315a51d-6d0f-4e6c-9ae1-6af96b74104f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.244526] env[69982]: INFO nova.scheduler.client.report [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted allocations for instance 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7 [ 1164.265441] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.269437] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865451, 'name': Rename_Task, 'duration_secs': 0.134753} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.269659] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1164.269917] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be14ff7b-ecf0-4a30-a896-392c7a9e6538 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.278429] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1164.278429] env[69982]: value = "task-3865452" [ 1164.278429] env[69982]: _type = "Task" [ 1164.278429] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.288441] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865452, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.420426] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865446, 'name': CreateVM_Task, 'duration_secs': 1.124019} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.420426] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1164.420426] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.420426] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.420840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1164.421081] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ce61b17-9c96-495e-b325-cd2e70a6c521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.427339] env[69982]: DEBUG 
oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1164.427339] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52469c00-6aa0-01a8-3612-d83bcbff4e7f" [ 1164.427339] env[69982]: _type = "Task" [ 1164.427339] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.438519] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52469c00-6aa0-01a8-3612-d83bcbff4e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.582673] env[69982]: DEBUG oslo_concurrency.lockutils [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] Releasing lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.582965] env[69982]: DEBUG nova.compute.manager [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Received event network-vif-deleted-299c46e3-a90a-4290-8d1e-2eb3e4d336db {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.583197] env[69982]: INFO nova.compute.manager [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Neutron deleted interface 299c46e3-a90a-4290-8d1e-2eb3e4d336db; detaching it from the instance and deleting it from the info cache [ 1164.583453] env[69982]: DEBUG nova.network.neutron [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.653079] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.680937] env[69982]: DEBUG nova.network.neutron [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.699100] env[69982]: DEBUG oslo_vmware.api [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865449, 'name': PowerOnVM_Task, 'duration_secs': 1.0687} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.699453] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.699693] env[69982]: INFO nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1164.699756] env[69982]: DEBUG nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.700591] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe619b0d-70ee-41ae-852a-f0c36b81b8ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.712419] env[69982]: DEBUG nova.compute.utils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1164.714053] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1164.714238] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.755355] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1b5946e9-bae7-47ef-b24f-ed5b8cd57c9f tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "4597a0b8-2c04-4755-8e0d-e00e5cdaacd7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.433s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.779044] env[69982]: DEBUG nova.policy [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db5e62cfeadb4f8290fea53c93fd0189', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6d2d65079fb46d8a9b1a31d2eab9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.795821] env[69982]: DEBUG oslo_vmware.api [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865452, 'name': PowerOnVM_Task, 'duration_secs': 0.499883} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.796065] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1164.796209] env[69982]: DEBUG nova.compute.manager [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1164.797064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0297f002-1038-47d5-a9dc-96c5facbe54d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.809385] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.809385] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.942154] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52469c00-6aa0-01a8-3612-d83bcbff4e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.010223} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.946276] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.946658] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1164.946930] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.947116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.947313] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1164.948496] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70865098-f586-45f3-9e81-e2dfa9c585d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.959675] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2406e069-b4ab-4dd5-b0af-7f53b80c9a8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.964217] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1164.964426] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1164.965641] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cfbd5ca-da6d-4eef-ad83-4ed5c411666e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.972857] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f45b1e-b9c4-4680-9cc9-c19c0167ef5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.978948] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1164.978948] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c5face-b53d-7279-76b0-45d4362a3276" [ 1164.978948] env[69982]: _type = "Task" [ 1164.978948] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.012715] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5e69b9-b66a-4543-a359-4e28e8692576 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.020093] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c5face-b53d-7279-76b0-45d4362a3276, 'name': SearchDatastore_Task, 'duration_secs': 0.010279} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.021645] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba374328-32ae-4f90-8edb-e3eb98dcd186 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.029969] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1435e704-a8b1-452d-a1e1-558b9c518df8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.035924] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1165.035924] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52951788-3f77-d058-17f7-8d452c388f43" [ 1165.035924] env[69982]: _type = "Task" [ 1165.035924] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.049322] env[69982]: DEBUG nova.compute.provider_tree [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.057221] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52951788-3f77-d058-17f7-8d452c388f43, 'name': SearchDatastore_Task, 'duration_secs': 0.009882} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.057221] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.057465] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 191d4433-cae3-48af-9c83-fa67499ad49c/191d4433-cae3-48af-9c83-fa67499ad49c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1165.057600] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed55c02c-f9be-46a8-b0ac-ff6c3a6c1179 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.066026] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1165.066026] env[69982]: value = "task-3865453" [ 1165.066026] env[69982]: _type = "Task" [ 1165.066026] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.078836] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865453, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.086242] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9088642f-88dd-4288-9752-1bedc9470075 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.095585] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e95e8d6-d38a-464a-82d4-5a16a447309b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.126643] env[69982]: DEBUG nova.compute.manager [req-7e4dd826-5794-4ff4-b814-a93ce512061d req-1cdbdf9f-dd70-40ad-927b-9224825251de service nova] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Detach interface failed, port_id=299c46e3-a90a-4290-8d1e-2eb3e4d336db, reason: Instance e5f7aebd-22a8-47fd-9b73-09791aecae56 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1165.149964] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Successfully created port: 56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1165.184079] env[69982]: INFO nova.compute.manager [-] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Took 1.58 seconds to deallocate network for instance. [ 1165.220367] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1165.225477] env[69982]: INFO nova.compute.manager [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Took 27.31 seconds to build instance. [ 1165.249734] env[69982]: DEBUG nova.compute.manager [req-e1ecc3f2-cf9f-41de-a34d-3e40b08d5433 req-f0e0d73c-090c-4ef5-bef8-60824ddcfac5 service nova] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Received event network-vif-deleted-b3a7809c-bd2b-4433-aab2-dc4c413eff31 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1165.314158] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1165.326688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.553119] env[69982]: DEBUG nova.scheduler.client.report [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1165.578345] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865453, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493556} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.578601] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 191d4433-cae3-48af-9c83-fa67499ad49c/191d4433-cae3-48af-9c83-fa67499ad49c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1165.578822] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1165.579132] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81d8ef10-1f02-4099-b540-c6393036e40a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.587010] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1165.587010] env[69982]: value = "task-3865454" [ 1165.587010] env[69982]: _type = "Task" [ 1165.587010] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.599552] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865454, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.693353] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.703429] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.703687] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.709430] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.709662] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.709867] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.710059] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.710235] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.712463] env[69982]: INFO nova.compute.manager [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Terminating instance [ 1165.729159] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e675c074-9bbe-40c0-95ad-bdca725dcab3 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.831s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.837735] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.058500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.847s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.064073] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.735s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.064406] env[69982]: DEBUG nova.objects.instance [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lazy-loading 'resources' on Instance uuid ad43c35a-69bc-4c84-8869-cfde6f516b9b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1166.080062] env[69982]: INFO nova.scheduler.client.report [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleted allocations for instance 1315a51d-6d0f-4e6c-9ae1-6af96b74104f [ 1166.097931] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865454, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065185} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.098255] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1166.099338] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58090e7b-f78f-4930-92c8-06e26eb5b695 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.123414] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 191d4433-cae3-48af-9c83-fa67499ad49c/191d4433-cae3-48af-9c83-fa67499ad49c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1166.123793] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b9fab13-109a-4670-b03b-751eb80318e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.146560] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1166.146560] env[69982]: value = "task-3865455" [ 1166.146560] env[69982]: _type = "Task" [ 1166.146560] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.157829] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865455, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.206568] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1166.220461] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "refresh_cache-4fd5ea57-dc28-4d56-abbc-53a3c71394bf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.220461] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquired lock "refresh_cache-4fd5ea57-dc28-4d56-abbc-53a3c71394bf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.220461] env[69982]: DEBUG nova.network.neutron [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1166.233840] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1166.265193] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1166.265562] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1166.265793] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1166.266064] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1166.266293] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1166.266516] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1166.266840] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1166.267055] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1166.267234] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1166.267399] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1166.267824] env[69982]: DEBUG nova.virt.hardware [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1166.268769] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1959ee7-f5dd-4e3d-8587-7d1e53ca9094 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.280436] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8f5c22-1535-4c2f-b322-7e00ca7c706c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.561590] env[69982]: DEBUG nova.compute.manager [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.561915] env[69982]: DEBUG nova.compute.manager [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] [instance: 
31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing instance network info cache due to event network-changed-ff8d3aec-2392-4a4a-80c2-aa0499153235. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1166.562095] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] Acquiring lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.562217] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] Acquired lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.562384] env[69982]: DEBUG nova.network.neutron [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Refreshing network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1166.591333] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aebab065-5c2a-4431-805c-b974bf9335f3 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "1315a51d-6d0f-4e6c-9ae1-6af96b74104f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.271s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.659529] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865455, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.732465] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.751625] env[69982]: DEBUG nova.network.neutron [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1166.805269] env[69982]: DEBUG nova.network.neutron [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.830520] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Successfully updated port: 56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.838123] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17fc411-3577-464b-8f70-0080958f3fdf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.851633] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87cd812-5d67-47a4-bd47-dece4a1b2dbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.887415] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b9a519-498a-40db-b42c-5191d70848a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.897503] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2924e35-c9a3-4cf6-bad4-77b0fe3dd5b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.914200] env[69982]: DEBUG nova.compute.provider_tree [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1167.160470] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865455, 'name': ReconfigVM_Task, 'duration_secs': 0.817987} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.160775] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 191d4433-cae3-48af-9c83-fa67499ad49c/191d4433-cae3-48af-9c83-fa67499ad49c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.161618] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f5231f0-e2df-49aa-981b-0b04de300c2a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.169039] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1167.169039] env[69982]: value = "task-3865456" [ 1167.169039] env[69982]: _type = "Task" [ 1167.169039] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.180332] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865456, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.290675] env[69982]: DEBUG nova.compute.manager [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Received event network-vif-plugged-56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1167.291134] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.291851] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.292345] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.292639] env[69982]: DEBUG nova.compute.manager [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] No waiting events found dispatching 
network-vif-plugged-56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1167.292935] env[69982]: WARNING nova.compute.manager [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Received unexpected event network-vif-plugged-56cebe1d-243c-4f51-b0a0-200e18141707 for instance with vm_state building and task_state spawning. [ 1167.295159] env[69982]: DEBUG nova.compute.manager [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Received event network-changed-56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1167.295159] env[69982]: DEBUG nova.compute.manager [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Refreshing instance network info cache due to event network-changed-56cebe1d-243c-4f51-b0a0-200e18141707. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1167.295159] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Acquiring lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.295159] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Acquired lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.295159] env[69982]: DEBUG nova.network.neutron [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Refreshing network info cache for port 56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1167.312590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Releasing lock "refresh_cache-4fd5ea57-dc28-4d56-abbc-53a3c71394bf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.314208] env[69982]: DEBUG nova.compute.manager [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.314208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.314528] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf4f1ab-aa5c-4f74-afc1-a44b1f349855 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.320734] env[69982]: DEBUG nova.network.neutron [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updated VIF entry in instance network info cache for port ff8d3aec-2392-4a4a-80c2-aa0499153235. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1167.321145] env[69982]: DEBUG nova.network.neutron [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [{"id": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "address": "fa:16:3e:7d:30:f1", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff8d3aec-23", "ovs_interfaceid": "ff8d3aec-2392-4a4a-80c2-aa0499153235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.324742] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.325260] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4f2253b-5b82-4649-ac9c-b4bb9b19c956 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.334645] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1167.334645] env[69982]: value = "task-3865457" [ 1167.334645] env[69982]: _type = "Task" [ 1167.334645] 
env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.340383] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.349249] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.417046] env[69982]: DEBUG nova.scheduler.client.report [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.467906] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.468335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.468571] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.468770] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.468960] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.471339] env[69982]: INFO nova.compute.manager [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Terminating instance [ 1167.679530] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865456, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.826564] env[69982]: DEBUG oslo_concurrency.lockutils [req-dd182b11-640e-4e3d-863e-0eaa93900951 req-829a057f-41b7-4f2c-8674-796d50a5e856 service nova] Releasing lock "refresh_cache-31f56d0e-7c64-4fe3-917e-7ebb814ae924" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.827725] env[69982]: DEBUG nova.network.neutron [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1167.845158] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865457, 'name': PowerOffVM_Task, 'duration_secs': 0.133074} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.845483] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.845606] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.845866] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-907e6a59-36c5-44a0-94c9-ac18a790b3d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.874256] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.874531] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.874742] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Deleting the datastore file [datastore2] 4fd5ea57-dc28-4d56-abbc-53a3c71394bf {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.875064] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da5000cf-4d52-4d16-b0c2-6e933c38824a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.883812] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for the task: (returnval){ [ 1167.883812] env[69982]: value = "task-3865459" [ 1167.883812] env[69982]: _type = "Task" [ 1167.883812] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.893250] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.904020] env[69982]: DEBUG nova.network.neutron [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.922775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.925863] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.661s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.927409] env[69982]: INFO nova.compute.claims [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1167.949399] env[69982]: INFO nova.scheduler.client.report [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Deleted allocations for instance ad43c35a-69bc-4c84-8869-cfde6f516b9b [ 1167.977383] env[69982]: DEBUG nova.compute.manager [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.977383] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.977763] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197e85b3-6433-4aca-b8ab-f610dade9d5a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.987866] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.988229] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a1fb25d7-486e-48e1-8f78-24505f050ac8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.998265] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1167.998265] env[69982]: value = "task-3865460" [ 1167.998265] env[69982]: _type = "Task" [ 1167.998265] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.008690] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865460, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.139068] env[69982]: DEBUG nova.compute.manager [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1168.179705] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865456, 'name': Rename_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.393931] env[69982]: DEBUG oslo_vmware.api [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Task: {'id': task-3865459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20449} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.394172] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.394368] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.394570] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.394760] env[69982]: INFO nova.compute.manager [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1168.395023] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.395223] env[69982]: DEBUG nova.compute.manager [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.395321] env[69982]: DEBUG nova.network.neutron [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1168.405849] env[69982]: DEBUG oslo_concurrency.lockutils [req-463e338c-fd44-4666-a2e6-0a4bab30b853 req-f0538c7d-6621-4bc2-896a-e35e0926ea8d service nova] Releasing lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.406228] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.406387] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1168.411872] env[69982]: DEBUG nova.network.neutron [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1168.460680] env[69982]: DEBUG oslo_concurrency.lockutils [None req-29fe3f65-c799-4e98-b8fa-8ff900cb201a tempest-ServersNegativeTestJSON-620845051 tempest-ServersNegativeTestJSON-620845051-project-member] Lock "ad43c35a-69bc-4c84-8869-cfde6f516b9b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.627s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.509105] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865460, 'name': PowerOffVM_Task, 'duration_secs': 0.219585} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.509406] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1168.509577] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1168.509837] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bde6ff9-3b57-4b6c-b142-704571e2eb95 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.576843] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1168.576998] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1168.577184] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleting the datastore file [datastore1] 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1168.577468] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f08614c2-26da-4c52-abae-46a77d166480 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.584507] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 
tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for the task: (returnval){ [ 1168.584507] env[69982]: value = "task-3865462" [ 1168.584507] env[69982]: _type = "Task" [ 1168.584507] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.590946] env[69982]: DEBUG nova.compute.manager [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Received event network-changed-ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.591241] env[69982]: DEBUG nova.compute.manager [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Refreshing instance network info cache due to event network-changed-ed266e98-9354-46dd-a173-6c25f605f719. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1168.591520] env[69982]: DEBUG oslo_concurrency.lockutils [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.593539] env[69982]: DEBUG oslo_concurrency.lockutils [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.593539] env[69982]: DEBUG nova.network.neutron [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Refreshing network info cache for port ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1168.596887] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865462, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.658882] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.680617] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865456, 'name': Rename_Task, 'duration_secs': 1.220269} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.680897] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1168.681220] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-664e2e6e-b32e-4daf-9648-1760b1a7dce1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.688244] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1168.688244] env[69982]: value = "task-3865463" [ 1168.688244] env[69982]: _type = "Task" [ 1168.688244] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.697357] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865463, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.914437] env[69982]: DEBUG nova.network.neutron [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.940501] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1169.100918] env[69982]: DEBUG oslo_vmware.api [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Task: {'id': task-3865462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172351} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.104182] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.104464] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.104718] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.104970] env[69982]: INFO nova.compute.manager [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1169.105286] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.106991] env[69982]: DEBUG nova.compute.manager [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.107208] env[69982]: DEBUG nova.network.neutron [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1169.109473] env[69982]: DEBUG nova.network.neutron [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [{"id": "56cebe1d-243c-4f51-b0a0-200e18141707", "address": "fa:16:3e:49:24:7a", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56cebe1d-24", "ovs_interfaceid": "56cebe1d-243c-4f51-b0a0-200e18141707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.169224] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da01bc1-d004-4471-9963-1d5a8f64d3ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.179990] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7454b85-c5e4-411b-a07c-8eaa7e7a8e69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.219417] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f94a84-2de6-4364-8278-f70d08d24875 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.232150] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865463, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.236023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e64e73-ffaf-4e54-bcbd-90e3d76bb2f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.251240] env[69982]: DEBUG nova.compute.provider_tree [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.418190] env[69982]: INFO nova.compute.manager [-] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Took 1.02 seconds to deallocate network for instance. [ 1169.620480] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.620480] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance network_info: |[{"id": "56cebe1d-243c-4f51-b0a0-200e18141707", "address": "fa:16:3e:49:24:7a", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56cebe1d-24", "ovs_interfaceid": "56cebe1d-243c-4f51-b0a0-200e18141707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1169.620480] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:24:7a', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56cebe1d-243c-4f51-b0a0-200e18141707', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1169.632187] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Creating folder: Project (d6d2d65079fb46d8a9b1a31d2eab9829). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1169.633013] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31ba7e4b-a125-4ade-94a5-af7cd2a90e0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.648171] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Created folder: Project (d6d2d65079fb46d8a9b1a31d2eab9829) in parent group-v767796. [ 1169.648171] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Creating folder: Instances. Parent ref: group-v768088. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1169.648171] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b50ae4fc-c6bf-4927-bf11-c24253b6beff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.662380] env[69982]: DEBUG nova.network.neutron [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updated VIF entry in instance network info cache for port ed266e98-9354-46dd-a173-6c25f605f719. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1169.662975] env[69982]: DEBUG nova.network.neutron [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.667330] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Created folder: Instances in parent group-v768088. [ 1169.667708] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.668292] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1169.668701] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-562812cd-5c5b-482c-ac36-c22e8505bac6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.698085] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1169.698085] env[69982]: value = "task-3865466" [ 1169.698085] env[69982]: _type = "Task" [ 1169.698085] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.712079] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865466, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.725100] env[69982]: DEBUG oslo_vmware.api [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865463, 'name': PowerOnVM_Task, 'duration_secs': 0.789575} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.725100] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.726215] env[69982]: INFO nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Took 10.34 seconds to spawn the instance on the hypervisor. [ 1169.726215] env[69982]: DEBUG nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1169.726860] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e10030-be24-4aa4-bd25-2f5983072428 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.780113] env[69982]: ERROR nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [req-4203591c-97fd-4ef8-bd33-b964d070ba89] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4203591c-97fd-4ef8-bd33-b964d070ba89"}]} [ 1169.805790] env[69982]: DEBUG nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1169.825272] env[69982]: DEBUG nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1169.825272] env[69982]: DEBUG nova.compute.provider_tree [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.848904] env[69982]: DEBUG nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1169.873088] env[69982]: DEBUG nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1169.926740] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.116595] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b57584b-32d8-424c-a594-7915ecd6f301 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.125724] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc782189-cea7-451d-a20f-cfe6bcbf91ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.162115] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9341e7ec-ac4c-46ad-8ac5-f5b037df96a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.169803] env[69982]: DEBUG oslo_concurrency.lockutils [req-215719f3-5d0d-498d-9611-b21a7e62f7ca req-0ecb40c3-9a6d-48aa-83d7-f1eb0be657be service nova] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.171523] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f789f22d-a8a8-49c3-8780-424824617298 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.189744] env[69982]: DEBUG nova.network.neutron [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.193471] env[69982]: DEBUG nova.compute.provider_tree [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.208037] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865466, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.249683] env[69982]: INFO nova.compute.manager [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Took 21.47 seconds to build instance. [ 1170.626405] env[69982]: DEBUG nova.compute.manager [req-f11d97a2-c7fa-422e-9e39-ccd528da1794 req-decef258-ba7a-4e5a-ad9a-f8587e96d92b service nova] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Received event network-vif-deleted-fe6324a8-865a-45fc-baef-1309b21878cd {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.696517] env[69982]: INFO nova.compute.manager [-] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Took 1.59 seconds to deallocate network for instance. [ 1170.713031] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865466, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.727658] env[69982]: DEBUG nova.scheduler.client.report [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 151 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1170.728062] env[69982]: DEBUG nova.compute.provider_tree [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 151 to 152 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1170.728302] env[69982]: DEBUG nova.compute.provider_tree [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.752417] env[69982]: DEBUG oslo_concurrency.lockutils [None req-31e417ac-00cd-4c9a-99b8-2392c062b228 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.981s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.209473] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.216018] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865466, 'name': CreateVM_Task, 'duration_secs': 1.363638} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.216018] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1171.216018] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.216018] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.216471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1171.216854] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51e953c3-5232-4e48-a460-0bfb93d463c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.224177] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1171.224177] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521486b4-7d9c-f97f-e12f-cf8bd791909d" [ 1171.224177] env[69982]: _type = "Task" [ 1171.224177] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.231584] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521486b4-7d9c-f97f-e12f-cf8bd791909d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.233651] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.308s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.235723] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.583s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.240593] env[69982]: DEBUG nova.objects.instance [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid e5f7aebd-22a8-47fd-9b73-09791aecae56 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.733594] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521486b4-7d9c-f97f-e12f-cf8bd791909d, 'name': SearchDatastore_Task, 'duration_secs': 0.010653} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.733942] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.734315] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1171.735183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.735183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.735183] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 
tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1171.735363] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25f14883-017b-409d-b136-7b81513745a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.742463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "3807ad61-d78d-4bca-8970-92587f0ea81d" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.742463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "3807ad61-d78d-4bca-8970-92587f0ea81d" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.747888] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1171.748097] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1171.754226] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc733bb7-0356-4ead-819d-304fa0d9e586 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.761940] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1171.761940] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228db27-8b6f-93aa-cb4d-727d7141ff24" [ 1171.761940] env[69982]: _type = "Task" [ 1171.761940] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.772878] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228db27-8b6f-93aa-cb4d-727d7141ff24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.990792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ab929e-76d4-4eaf-aa84-79dc23e964d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.999140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da704415-ad3b-4d95-b771-5ec947cb7e46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.036644] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35984f5-c441-4ad6-af09-a5c8852830d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.046059] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2f87a2-95bf-411c-b867-81150d1c8c61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.061377] env[69982]: DEBUG nova.compute.provider_tree [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.244943] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "3807ad61-d78d-4bca-8970-92587f0ea81d" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.502s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.245587] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1172.276376] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5228db27-8b6f-93aa-cb4d-727d7141ff24, 'name': SearchDatastore_Task, 'duration_secs': 0.011266} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.277205] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69262b6b-0bd5-4e31-8537-731d40f35e5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.283676] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1172.283676] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5201022c-d341-126f-c1b3-47577a74e8be" [ 1172.283676] env[69982]: _type = "Task" [ 1172.283676] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.294832] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5201022c-d341-126f-c1b3-47577a74e8be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.565146] env[69982]: DEBUG nova.scheduler.client.report [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.693620] env[69982]: DEBUG nova.compute.manager [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Received event network-changed-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.693957] env[69982]: DEBUG nova.compute.manager [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Refreshing instance network info cache due to event network-changed-4b426f54-9cfa-4f17-ac93-6cc3529b9b86. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1172.694572] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] Acquiring lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.694732] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] Acquired lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.694903] env[69982]: DEBUG nova.network.neutron [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Refreshing network info cache for port 4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1172.751886] env[69982]: DEBUG nova.compute.utils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1172.752894] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1172.754201] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1172.794955] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5201022c-d341-126f-c1b3-47577a74e8be, 'name': SearchDatastore_Task, 'duration_secs': 0.011024} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.795522] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1172.795660] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ce24e165-230a-44bb-ae46-d1479e71585a/ce24e165-230a-44bb-ae46-d1479e71585a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1172.795997] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fa014b8-8d19-4274-a1ff-cf0391d7b443 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.806696] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1172.806696] env[69982]: value = "task-3865467" [ 1172.806696] env[69982]: _type = "Task" [ 1172.806696] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.816173] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865467, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.821083] env[69982]: DEBUG nova.policy [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dacaa04488c64d89b5d1505b7b6e622c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ba937b5d3ab404da52dccc9d44ba2d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.071212] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.074671] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.748s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.074955] env[69982]: DEBUG nova.objects.instance [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1173.111095] env[69982]: INFO nova.scheduler.client.report [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance e5f7aebd-22a8-47fd-9b73-09791aecae56 [ 1173.257878] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1173.317721] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865467, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495221} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.318363] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] ce24e165-230a-44bb-ae46-d1479e71585a/ce24e165-230a-44bb-ae46-d1479e71585a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1173.318363] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1173.318514] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62ec88fc-a8ba-4a1e-8f72-5db25fc2d294 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.325870] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1173.325870] env[69982]: value = "task-3865468" [ 1173.325870] env[69982]: _type = "Task" [ 1173.325870] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.334870] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865468, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.480398] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Successfully created port: f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.623913] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ddc6b6dd-6ad2-423b-b002-06537a4efd02 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "e5f7aebd-22a8-47fd-9b73-09791aecae56" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.652s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.776026] env[69982]: DEBUG nova.network.neutron [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updated VIF entry in instance network info cache for port 4b426f54-9cfa-4f17-ac93-6cc3529b9b86. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1173.776026] env[69982]: DEBUG nova.network.neutron [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating instance_info_cache with network_info: [{"id": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "address": "fa:16:3e:e5:7a:1a", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b426f54-9c", "ovs_interfaceid": "4b426f54-9cfa-4f17-ac93-6cc3529b9b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.836250] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865468, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070826} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.836525] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1173.837370] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0f7e81-623d-483d-83ad-1222d093285d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.860508] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] ce24e165-230a-44bb-ae46-d1479e71585a/ce24e165-230a-44bb-ae46-d1479e71585a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1173.860816] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63d56ef9-8692-4989-a816-95b64f4621d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.881494] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1173.881494] env[69982]: value = "task-3865469" [ 1173.881494] env[69982]: _type = "Task" [ 1173.881494] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.890856] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865469, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.088074] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b3677639-2ea4-4344-9435-7814707fca35 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.090327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.397s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.090545] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.094981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.257s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1174.096525] env[69982]: INFO nova.compute.claims [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1174.219559] env[69982]: INFO nova.scheduler.client.report [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Deleted allocations for instance 642b42eb-eeef-401c-8feb-032d783c645a [ 1174.278183] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1174.280498] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bac8a15-a811-486b-bb8c-9144e0fc9fa9 req-c3ff72e5-142f-4593-b9d2-f83cca4cccb3 service nova] Releasing lock "refresh_cache-191d4433-cae3-48af-9c83-fa67499ad49c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.302474] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1174.303237] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1174.303237] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1174.303237] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1174.303237] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1174.303467] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1174.303604] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1174.303772] env[69982]: DEBUG nova.virt.hardware [None 
req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1174.303934] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1174.304106] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1174.304276] env[69982]: DEBUG nova.virt.hardware [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1174.305228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aab68e1-2fe8-4177-b152-61cdab7b3090 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.314401] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ddfc15-9065-4a97-b158-0da78d9333b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.393522] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865469, 'name': ReconfigVM_Task, 'duration_secs': 0.294151} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.394041] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfigured VM instance instance-0000006d to attach disk [datastore1] ce24e165-230a-44bb-ae46-d1479e71585a/ce24e165-230a-44bb-ae46-d1479e71585a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.394591] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77d01c69-034a-4afa-8409-0bcec36bd468 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.401666] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1174.401666] env[69982]: value = "task-3865470" [ 1174.401666] env[69982]: _type = "Task" [ 1174.401666] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.416585] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865470, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.729894] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e0426c2f-9bd5-43ce-87de-3bf0002b70a7 tempest-DeleteServersTestJSON-1273084896 tempest-DeleteServersTestJSON-1273084896-project-member] Lock "642b42eb-eeef-401c-8feb-032d783c645a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.770s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1174.915855] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865470, 'name': Rename_Task, 'duration_secs': 0.149651} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.916258] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1174.916692] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84962974-7d18-4c4b-b34d-bda320e32d2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.924619] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1174.924619] env[69982]: value = "task-3865471" [ 1174.924619] env[69982]: _type = "Task" [ 1174.924619] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.935810] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865471, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.214301] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.214301] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.299062] env[69982]: DEBUG nova.compute.manager [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Received event network-vif-plugged-f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1175.299327] env[69982]: DEBUG oslo_concurrency.lockutils [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] Acquiring lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.299548] env[69982]: DEBUG oslo_concurrency.lockutils [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.299652] env[69982]: DEBUG oslo_concurrency.lockutils [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.299803] env[69982]: DEBUG nova.compute.manager [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] No waiting events found dispatching network-vif-plugged-f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1175.299961] env[69982]: WARNING nova.compute.manager [req-d42ba5ed-6c98-4dca-801c-acde7696cb08 req-6f5b7fde-a6e3-4f07-9a90-ac031a32971a service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Received unexpected event network-vif-plugged-f7df84f7-0f9d-4a14-8754-7df70a42ac3f for instance with vm_state building and task_state spawning. 
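
The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ..." lines above come from the compute manager's per-instance event waiting: an operation that expects a Neutron notification registers a waiter under the instance's events lock first, and an event that arrives with no registered waiter has nothing to dispatch to and is logged as unexpected. A minimal sketch of that pattern follows; the names (InstanceEventRegistry, prepare, pop_instance_event) are illustrative assumptions, not Nova's actual classes.

# Illustrative sketch (not Nova's code) of a per-instance event registry.
import threading
from collections import defaultdict

class InstanceEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)  # instance_uuid -> {event_key: threading.Event}

    def prepare(self, instance_uuid, event_key):
        # Called by the side that will later wait for the Neutron notification.
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_key] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_key):
        # Called when an external event arrives; returns the waiter or None.
        with self._lock:
            return self._waiters[instance_uuid].pop(event_key, None)

registry = InstanceEventRegistry()

# External event arrives, but nothing registered a waiter for it yet:
if registry.pop_instance_event("245c6ca2", "network-vif-plugged-f7df84f7") is None:
    print("WARNING: received unexpected event network-vif-plugged-f7df84f7 "
          "for instance with vm_state building and task_state spawning")
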
[ 1175.362495] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954cc1d5-b844-4acf-9884-05f5c3e7cb0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.373578] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896fc73f-b516-4753-8196-5359c713b6ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.414383] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Successfully updated port: f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1175.416663] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9feaca55-9b86-4ad9-8596-5129c14c03e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.424930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f43ee99-db6a-4243-a725-e1a41a4d24fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.445873] env[69982]: DEBUG nova.compute.provider_tree [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1175.450874] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865471, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.719949] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1175.920507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.920507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquired lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.920837] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1175.940306] env[69982]: DEBUG oslo_vmware.api [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865471, 'name': PowerOnVM_Task, 'duration_secs': 0.711851} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.940581] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.940782] env[69982]: INFO nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Took 9.71 seconds to spawn the instance on the hypervisor. 
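
The repeated "Task: {...} progress is N%" entries around the spawn of instance ce24e165-230a-44bb-ae46-d1479e71585a (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all follow one pattern: each vCenter call returns a task handle that is polled until it reports success or failure. A minimal sketch of that polling loop follows; get_task_info and poll_interval are assumed illustrative names, not the oslo.vmware API.

# Illustrative sketch of the task-polling pattern behind the progress lines.
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    while True:
        info = get_task_info()              # e.g. {'state': 'running', 'progress': 88}
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

# Toy usage: a fake task that completes on the third poll.
_polls = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 88},
               {"state": "success", "result": "task-3865471"}])
print(wait_for_task(lambda: next(_polls), poll_interval=0.01))
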
[ 1175.940973] env[69982]: DEBUG nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1175.941846] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dba17e-db89-4414-b11b-d98a16cf3a1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.980558] env[69982]: ERROR nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [req-91505afa-37fd-4f23-9849-2a3c03b252a7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-91505afa-37fd-4f23-9849-2a3c03b252a7"}]} [ 1175.997233] env[69982]: DEBUG nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1176.012773] env[69982]: DEBUG nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1176.013108] env[69982]: DEBUG nova.compute.provider_tree [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.027033] env[69982]: DEBUG 
nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1176.046016] env[69982]: DEBUG nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1176.155236] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.155953] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.238343] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.261814] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d24935-57ff-40f7-8d0e-19835ac1ee82 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.270747] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb46703-9c2f-4a3f-9016-7c63d9b53797 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.302306] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abe2dfa-d0e3-4a8e-801c-822edbc519e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.310263] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578c5936-5903-489c-ac14-a25f4ce82069 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.325012] env[69982]: DEBUG nova.compute.provider_tree [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 
tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.454808] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1176.464803] env[69982]: INFO nova.compute.manager [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Took 23.88 seconds to build instance. [ 1176.601971] env[69982]: DEBUG nova.network.neutron [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Updating instance_info_cache with network_info: [{"id": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "address": "fa:16:3e:92:52:c9", "network": {"id": "485ed3d4-7f39-49a9-a586-ee363a130723", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2134251109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba937b5d3ab404da52dccc9d44ba2d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7df84f7-0f", "ovs_interfaceid": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.657455] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1176.855166] env[69982]: DEBUG nova.scheduler.client.report [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1176.855448] env[69982]: DEBUG nova.compute.provider_tree [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 154 to 155 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1176.855629] env[69982]: DEBUG nova.compute.provider_tree [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1176.966941] env[69982]: DEBUG oslo_concurrency.lockutils [None req-607c81d2-328c-4d56-965d-8cd2c2751838 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.388s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.108019] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Releasing lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.108019] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Instance network_info: |[{"id": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "address": "fa:16:3e:92:52:c9", "network": {"id": "485ed3d4-7f39-49a9-a586-ee363a130723", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2134251109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba937b5d3ab404da52dccc9d44ba2d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7df84f7-0f", "ovs_interfaceid": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1177.108019] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:52:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d859f07-052d-4a69-bdf1-24261a6a6daa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7df84f7-0f9d-4a14-8754-7df70a42ac3f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1177.114820] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Creating folder: Project (4ba937b5d3ab404da52dccc9d44ba2d4). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1177.115294] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56538b4f-96e9-48fa-b113-d804968d5dda {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.130015] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Created folder: Project (4ba937b5d3ab404da52dccc9d44ba2d4) in parent group-v767796. [ 1177.132020] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Creating folder: Instances. Parent ref: group-v768091. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1177.132020] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39d24668-62d9-4f4d-96ad-9a1e0b76daef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.142483] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Created folder: Instances in parent group-v768091. 
[ 1177.142799] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1177.143013] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1177.143242] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac658e30-3613-4cf2-bcb9-26deeea9f17b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.164791] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1177.164791] env[69982]: value = "task-3865475" [ 1177.164791] env[69982]: _type = "Task" [ 1177.164791] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.176045] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865475, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.181146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.327038] env[69982]: DEBUG nova.compute.manager [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Received event network-changed-f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.327384] env[69982]: DEBUG nova.compute.manager [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Refreshing instance network info cache due to event network-changed-f7df84f7-0f9d-4a14-8754-7df70a42ac3f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.327627] env[69982]: DEBUG oslo_concurrency.lockutils [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] Acquiring lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.327833] env[69982]: DEBUG oslo_concurrency.lockutils [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] Acquired lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.328131] env[69982]: DEBUG nova.network.neutron [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Refreshing network info cache for port f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1177.361493] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.266s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.362234] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1177.366335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.634s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.369766] env[69982]: INFO nova.compute.claims [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1177.492238] env[69982]: DEBUG nova.compute.manager [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Received event network-changed-56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1177.492828] env[69982]: DEBUG nova.compute.manager [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Refreshing instance network info cache due to event network-changed-56cebe1d-243c-4f51-b0a0-200e18141707. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1177.493642] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] Acquiring lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.494218] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] Acquired lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.494218] env[69982]: DEBUG nova.network.neutron [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Refreshing network info cache for port 56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1177.678081] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865475, 'name': CreateVM_Task, 'duration_secs': 0.351042} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.678264] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1177.678970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.679150] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.679470] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1177.679744] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ac82da4-c353-4faf-82e5-eef932eb8087 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.685700] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1177.685700] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52413e59-f3f4-7637-3151-f33fe875f818" [ 1177.685700] env[69982]: _type = "Task" [ 1177.685700] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.696031] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52413e59-f3f4-7637-3151-f33fe875f818, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.878900] env[69982]: DEBUG nova.compute.utils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1177.880304] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1177.880481] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1177.956643] env[69982]: DEBUG nova.policy [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1178.145783] env[69982]: DEBUG nova.network.neutron [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Updated VIF entry in instance network info cache for port f7df84f7-0f9d-4a14-8754-7df70a42ac3f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1178.146228] env[69982]: DEBUG nova.network.neutron [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Updating instance_info_cache with network_info: [{"id": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "address": "fa:16:3e:92:52:c9", "network": {"id": "485ed3d4-7f39-49a9-a586-ee363a130723", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-2134251109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ba937b5d3ab404da52dccc9d44ba2d4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7df84f7-0f", "ovs_interfaceid": "f7df84f7-0f9d-4a14-8754-7df70a42ac3f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.197978] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52413e59-f3f4-7637-3151-f33fe875f818, 'name': SearchDatastore_Task, 'duration_secs': 0.012566} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.201453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.201846] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.201967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.202135] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.202325] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.202647] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11ae8628-1dcd-4c5b-91b8-8caeb4abdbfe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.213862] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.214136] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1178.215103] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5320538-9643-4fa5-995a-6c7bc191d239 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.222916] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1178.222916] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5292012d-8723-905b-ab8a-4fced5ce722e" [ 1178.222916] env[69982]: _type = "Task" [ 1178.222916] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.231737] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5292012d-8723-905b-ab8a-4fced5ce722e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.284652] env[69982]: DEBUG nova.network.neutron [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updated VIF entry in instance network info cache for port 56cebe1d-243c-4f51-b0a0-200e18141707. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1178.285067] env[69982]: DEBUG nova.network.neutron [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [{"id": "56cebe1d-243c-4f51-b0a0-200e18141707", "address": "fa:16:3e:49:24:7a", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56cebe1d-24", "ovs_interfaceid": "56cebe1d-243c-4f51-b0a0-200e18141707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.384529] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 
36b7f89e-7552-40b9-ada4-01abfcea8310] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1178.406688] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Successfully created port: 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1178.619266] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6668a24e-6ae3-49f8-9466-7f92d489601f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.628461] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064ea7d9-f583-47c3-ad31-00701b855711 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.663768] env[69982]: DEBUG oslo_concurrency.lockutils [req-0a2cc61d-d975-4b88-baf5-97aae237cd88 req-ec2304be-bab3-475e-b0a6-d437e712a1cd service nova] Releasing lock "refresh_cache-245c6ca2-4dbf-46b5-9b08-e08db224d09f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.666204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2cf90b-9bcb-4a80-ab70-27308a25c08d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.673895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8973b0-5355-4b25-bb11-ff325a409957 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.689471] env[69982]: DEBUG nova.compute.provider_tree [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1178.734007] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5292012d-8723-905b-ab8a-4fced5ce722e, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.734907] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb79350-987e-4473-ade3-32d3e2f087f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.741576] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1178.741576] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214bbb2-8ea5-772a-b7a1-5d1db8d3e67b" [ 1178.741576] env[69982]: _type = "Task" [ 1178.741576] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.752365] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214bbb2-8ea5-772a-b7a1-5d1db8d3e67b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.788160] env[69982]: DEBUG oslo_concurrency.lockutils [req-f8222581-642e-4901-9421-2ac809fd7155 req-198668fc-3ef9-4ecf-92a2-bc20d9df6252 service nova] Releasing lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.192956] env[69982]: DEBUG nova.scheduler.client.report [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1179.253123] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214bbb2-8ea5-772a-b7a1-5d1db8d3e67b, 'name': SearchDatastore_Task, 'duration_secs': 0.011177} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.253450] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.253793] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 245c6ca2-4dbf-46b5-9b08-e08db224d09f/245c6ca2-4dbf-46b5-9b08-e08db224d09f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1179.254098] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f9f3b9a-3628-4734-ba6b-a5c786004a9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.260847] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1179.260847] env[69982]: value = "task-3865476" [ 1179.260847] env[69982]: _type = "Task" [ 1179.260847] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.270123] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865476, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.396924] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1179.442502] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1179.442811] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1179.442990] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1179.443217] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1179.443368] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1179.443572] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1179.443802] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1179.443976] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1179.444182] env[69982]: DEBUG 
nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1179.444363] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1179.444540] env[69982]: DEBUG nova.virt.hardware [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1179.445651] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ef3919-4e48-490e-bcf7-e830f9cb5ca3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.455866] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202292cd-8041-414a-9143-9f95ee26b102 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.704028] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.704028] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1179.705940] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 11.047s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.771897] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865476, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.895148] env[69982]: DEBUG nova.compute.manager [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-plugged-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.895654] env[69982]: DEBUG oslo_concurrency.lockutils [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.895853] env[69982]: DEBUG oslo_concurrency.lockutils [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.896124] env[69982]: DEBUG oslo_concurrency.lockutils [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.896321] env[69982]: DEBUG nova.compute.manager [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] No waiting events found dispatching network-vif-plugged-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1179.896712] env[69982]: WARNING nova.compute.manager [req-588eab84-07fa-4c59-8187-827936376871 req-4458440a-b616-4c0a-a3c6-39adf31b331b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received unexpected event network-vif-plugged-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 for instance with vm_state building and task_state spawning. 
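The WARNING just above ("Received unexpected event network-vif-plugged-... for instance with vm_state building and task_state spawning") reflects an expect/receive handshake for external events: a waiter registers the event names it expects before starting the operation, and a Neutron notification that arrives while nothing is registered is reported as unexpected. The class below is a minimal illustrative version of that pattern built on threading.Event; it is not the nova.compute.manager.InstanceEvents implementation referenced in the lock names above.

# Illustrative sketch of the expect/receive handshake behind the
# network-vif-plugged lines: register expected events, then wait; a
# notification nobody registered for is treated as "unexpected".
import threading

class ExpectedEvents:
    def __init__(self):
        self._events: dict[str, threading.Event] = {}
        self._guard = threading.Lock()

    def prepare(self, name: str) -> threading.Event:
        """Call before starting the operation that will trigger the event."""
        with self._guard:
            return self._events.setdefault(name, threading.Event())

    def notify(self, name: str) -> bool:
        """Called from the external-event handler; False means unexpected."""
        with self._guard:
            ev = self._events.pop(name, None)
        if ev is None:
            print(f"WARNING: received unexpected event {name}")
            return False
        ev.set()
        return True

events = ExpectedEvents()
waiter = events.prepare("network-vif-plugged-18ad43f6")
# ... plug the VIF; later the notification handler calls:
events.notify("network-vif-plugged-18ad43f6")
waiter.wait(timeout=300)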
[ 1179.993103] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Successfully updated port: 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1180.213673] env[69982]: DEBUG nova.compute.utils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1180.217472] env[69982]: INFO nova.compute.claims [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.221731] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1180.221809] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1180.272898] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865476, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527142} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.273130] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 245c6ca2-4dbf-46b5-9b08-e08db224d09f/245c6ca2-4dbf-46b5-9b08-e08db224d09f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.273362] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.273648] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af885ef3-b41d-454d-b6d7-6ef9dd926c22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.280474] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1180.280474] env[69982]: value = "task-3865477" [ 1180.280474] env[69982]: _type = "Task" [ 1180.280474] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.287467] env[69982]: DEBUG nova.policy [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08d17e3ac5f40c6890dc8dcc4c559d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efe64e69253d49a6a1146f240506ce39', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1180.294700] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865477, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.497467] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1180.497712] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.497844] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1180.652075] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Successfully created port: 98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1180.722778] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1180.727475] env[69982]: INFO nova.compute.resource_tracker [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating resource usage from migration 883348c9-d966-48bb-a09d-bb8cdbfbe687 [ 1180.798018] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865477, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068512} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.799286] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1180.800318] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921e180c-594e-4f89-9a49-93965d6dfccf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.828790] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 245c6ca2-4dbf-46b5-9b08-e08db224d09f/245c6ca2-4dbf-46b5-9b08-e08db224d09f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1180.832235] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b420112-04da-4d7a-affa-a656bc973393 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.855925] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1180.855925] env[69982]: value = "task-3865478" [ 1180.855925] env[69982]: _type = "Task" [ 1180.855925] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.865670] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865478, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.987694] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c257822-5ac6-452e-82dc-b49925607f34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.996218] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42979c9b-c3fc-4bd8-b561-049f49f3e7a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.030222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a801ee69-ff9a-4d88-be5d-e9e6ffdda70d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.038220] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9f619f-0f88-4c5d-b9db-394f7e551f4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.054021] env[69982]: DEBUG nova.compute.provider_tree [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.060950] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1181.204050] env[69982]: DEBUG nova.network.neutron [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.284123] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.284323] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.284465] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.284667] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.284837] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.285000] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.285171] env[69982]: DEBUG oslo_service.periodic_task [None 
req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.285327] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1181.285489] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1181.366021] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865478, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.557942] env[69982]: DEBUG nova.scheduler.client.report [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1181.707654] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.707870] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Instance network_info: |[{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": 
"18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1181.708706] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:65:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18ad43f6-284f-4ffe-8c0c-638aa5dc1be9', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1181.716441] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1181.717029] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1181.717029] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e6b522c-ed8c-4b2e-8f44-698076a6e316 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.737822] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1181.739774] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1181.739774] env[69982]: value = "task-3865479" [ 1181.739774] env[69982]: _type = "Task" [ 1181.739774] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.748856] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865479, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.766687] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1181.766823] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1181.766973] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1181.767249] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1181.767469] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1181.767767] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1181.768170] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1181.768449] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1181.768662] env[69982]: DEBUG nova.virt.hardware [None 
req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1181.768907] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1181.769138] env[69982]: DEBUG nova.virt.hardware [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1181.770081] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb5aa51-ae9b-4018-b1ef-1e9209fcc6fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.778744] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b295a98-58c9-49fb-b852-756303d6e6c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.793677] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.864567] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865478, 'name': ReconfigVM_Task, 'duration_secs': 0.579616} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.864951] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 245c6ca2-4dbf-46b5-9b08-e08db224d09f/245c6ca2-4dbf-46b5-9b08-e08db224d09f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.865724] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4910fe32-12a2-444a-80a7-8a667c078347 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.872738] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1181.872738] env[69982]: value = "task-3865480" [ 1181.872738] env[69982]: _type = "Task" [ 1181.872738] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.885490] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865480, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.940532] env[69982]: DEBUG nova.compute.manager [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-changed-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.940738] env[69982]: DEBUG nova.compute.manager [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing instance network info cache due to event network-changed-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1181.941253] env[69982]: DEBUG oslo_concurrency.lockutils [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.941430] env[69982]: DEBUG oslo_concurrency.lockutils [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.941627] env[69982]: DEBUG nova.network.neutron [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing network info cache for port 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1182.063577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.357s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.063821] env[69982]: INFO nova.compute.manager [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Migrating [ 1182.076973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.150s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.077291] env[69982]: DEBUG nova.objects.instance [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lazy-loading 'resources' on 
Instance uuid 4fd5ea57-dc28-4d56-abbc-53a3c71394bf {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1182.251643] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865479, 'name': CreateVM_Task, 'duration_secs': 0.408731} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.252621] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Successfully updated port: 98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1182.253750] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1182.255554] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.255750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.256080] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1182.256820] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3c93eb1-eca2-443d-af99-259518530477 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.263340] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1182.263340] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52febf53-f12c-9f9c-05d9-d15816823b8d" [ 1182.263340] env[69982]: _type = "Task" [ 1182.263340] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.274137] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52febf53-f12c-9f9c-05d9-d15816823b8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.383360] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865480, 'name': Rename_Task, 'duration_secs': 0.160969} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.383685] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1182.383944] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45830f06-782f-4090-a9d2-17f140807c78 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.390497] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1182.390497] env[69982]: value = "task-3865481" [ 1182.390497] env[69982]: _type = "Task" [ 1182.390497] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.398684] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.585084] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.585300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.585450] env[69982]: DEBUG nova.network.neutron [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1182.642015] env[69982]: DEBUG nova.network.neutron [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updated VIF entry in instance network info cache for port 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1182.642409] env[69982]: DEBUG nova.network.neutron [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.758488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.758590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.758957] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1182.777746] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52febf53-f12c-9f9c-05d9-d15816823b8d, 'name': SearchDatastore_Task, 'duration_secs': 0.010677} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.778083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.778341] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1182.778689] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1182.779161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1182.779161] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1182.779378] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bd4ca92-b78c-435f-8f40-45e6ad78f4cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.788800] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1182.788997] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1182.792207] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cc326d6-5705-413b-837a-b3bc7a68e9bc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.798352] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1182.798352] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5241045c-ea66-0033-a383-6220be661745" [ 1182.798352] env[69982]: _type = "Task" [ 1182.798352] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.807551] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5241045c-ea66-0033-a383-6220be661745, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.823419] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9f3f91-ccbc-430f-a586-e62605d47436 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.832519] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fd644c-2827-4179-b757-24703745d8df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.866701] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de0eb90-3d4d-49f4-a1cc-7366848028a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.875611] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d23f68-23fd-4200-922c-5a3e4550d930 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.890781] env[69982]: DEBUG nova.compute.provider_tree [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.904122] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865481, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.146509] env[69982]: DEBUG oslo_concurrency.lockutils [req-b902afdd-29b5-4351-b87b-098ef89e1b20 req-6d41d3ec-e609-49ad-95ac-ea84a707ccdc service nova] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.304898] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1183.311818] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5241045c-ea66-0033-a383-6220be661745, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.312802] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80c41b17-41c0-48eb-99c0-71d973b3be82 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.319122] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1183.319122] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525fc30b-fd48-58b8-9c5f-39b7635945c1" [ 1183.319122] env[69982]: _type = "Task" [ 1183.319122] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.322912] env[69982]: DEBUG nova.network.neutron [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.332817] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525fc30b-fd48-58b8-9c5f-39b7635945c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.399286] env[69982]: DEBUG nova.scheduler.client.report [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1183.412531] env[69982]: DEBUG oslo_vmware.api [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865481, 'name': PowerOnVM_Task, 'duration_secs': 0.53481} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.412994] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.413368] env[69982]: INFO nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Took 9.14 seconds to spawn the instance on the hypervisor. [ 1183.413708] env[69982]: DEBUG nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.415011] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5a9d87-8815-43ba-92f5-155a72b2df7e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.459126] env[69982]: DEBUG nova.network.neutron [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1183.826057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.832746] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525fc30b-fd48-58b8-9c5f-39b7635945c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009784} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.833253] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.833503] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 36b7f89e-7552-40b9-ada4-01abfcea8310/36b7f89e-7552-40b9-ada4-01abfcea8310.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1183.833783] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69d7df2b-0b46-42cb-b0ce-2268fe7501db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.841871] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1183.841871] env[69982]: value = "task-3865482" [ 1183.841871] env[69982]: _type = "Task" [ 1183.841871] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1183.851362] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865482, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.908151] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.832s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.911981] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.701s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.912352] env[69982]: DEBUG nova.objects.instance [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lazy-loading 'resources' on Instance uuid 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1183.937815] env[69982]: INFO nova.scheduler.client.report [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Deleted allocations for instance 4fd5ea57-dc28-4d56-abbc-53a3c71394bf [ 1183.944476] env[69982]: INFO nova.compute.manager [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Took 19.70 seconds to build instance. 
[ 1183.961954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.962466] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Instance network_info: |[{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1183.963916] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:d2:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98b299b9-3425-43e1-95bf-4acd909b7ad4', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1183.973543] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1183.975146] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1183.975146] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95c2fa3e-4efa-45ea-945a-7d5067364420 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.993654] env[69982]: DEBUG nova.compute.manager [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Received event network-vif-plugged-98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.993905] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.994133] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.994309] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.994486] env[69982]: DEBUG nova.compute.manager [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] No waiting events found dispatching network-vif-plugged-98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.994682] env[69982]: WARNING nova.compute.manager [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Received unexpected event network-vif-plugged-98b299b9-3425-43e1-95bf-4acd909b7ad4 for instance with vm_state building and task_state spawning. [ 1183.994839] env[69982]: DEBUG nova.compute.manager [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Received event network-changed-98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.994992] env[69982]: DEBUG nova.compute.manager [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Refreshing instance network info cache due to event network-changed-98b299b9-3425-43e1-95bf-4acd909b7ad4. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1183.995195] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.995331] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.995486] env[69982]: DEBUG nova.network.neutron [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Refreshing network info cache for port 98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1184.006728] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1184.006728] env[69982]: value = "task-3865483" [ 1184.006728] env[69982]: _type = "Task" [ 1184.006728] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.021590] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865483, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.352432] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865482, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474128} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.352838] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 36b7f89e-7552-40b9-ada4-01abfcea8310/36b7f89e-7552-40b9-ada4-01abfcea8310.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1184.353190] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1184.353557] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b789ca66-5097-4e00-95fb-f597d66790fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.359886] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1184.359886] env[69982]: value = "task-3865484" [ 1184.359886] env[69982]: _type = "Task" [ 1184.359886] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.369513] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865484, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.445562] env[69982]: DEBUG oslo_concurrency.lockutils [None req-47ef1107-c6c5-4bd2-a65f-c64e3efa4114 tempest-ServerShowV254Test-1461682144 tempest-ServerShowV254Test-1461682144-project-member] Lock "4fd5ea57-dc28-4d56-abbc-53a3c71394bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.736s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.446640] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0b32d198-026d-4dde-a325-89d6b527a5d2 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.212s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.518062] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865483, 'name': CreateVM_Task, 'duration_secs': 0.513259} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.518264] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1184.518934] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.519293] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.519592] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1184.520166] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c45f9fda-4f48-4286-955c-0832f976a487 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.524698] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1184.524698] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526ab9ae-a91b-1cb0-0101-0bd318210076" [ 1184.524698] env[69982]: _type = "Task" [ 1184.524698] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.540356] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526ab9ae-a91b-1cb0-0101-0bd318210076, 'name': SearchDatastore_Task, 'duration_secs': 0.010595} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.542955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.543050] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1184.543332] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.543488] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.543698] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.544349] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97359a77-6c0d-4ef4-a804-7f82dcdbd5f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.552831] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.553045] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1184.556119] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2aa5f9d5-0360-48c0-af5a-54db2e74b369 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.561845] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.562181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.562284] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.562463] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1184.562630] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.564716] env[69982]: INFO nova.compute.manager [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Terminating instance [ 1184.567879] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1184.567879] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e16aa9-523d-d7bf-c083-a8e554ec447e" [ 1184.567879] env[69982]: _type = "Task" [ 1184.567879] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.584898] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e16aa9-523d-d7bf-c083-a8e554ec447e, 'name': SearchDatastore_Task, 'duration_secs': 0.010284} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.588966] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1dc942f-cc0d-48c7-a574-5c35d63823af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.596384] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1184.596384] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527a2c87-cc77-e573-a3b2-672e377679e7" [ 1184.596384] env[69982]: _type = "Task" [ 1184.596384] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.608632] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527a2c87-cc77-e573-a3b2-672e377679e7, 'name': SearchDatastore_Task, 'duration_secs': 0.01119} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.608888] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.609157] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1184.609413] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db5c7332-8719-43e2-becd-59fd00d28557 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.618051] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1184.618051] env[69982]: value = "task-3865485" [ 1184.618051] env[69982]: _type = "Task" [ 1184.618051] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.627714] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.629498] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d51e80-a275-4547-a2ad-180b3f961de1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.641546] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15c6c4b-4c88-405e-b183-3f5f1915f4e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.679779] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c5f1de-6cd0-494c-acd4-e32acdc68c91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.692363] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88854c97-5e9f-4992-ad0b-56d6df17b7f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.708132] env[69982]: DEBUG nova.compute.provider_tree [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.761972] env[69982]: DEBUG nova.network.neutron [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updated VIF entry in instance network info cache for port 98b299b9-3425-43e1-95bf-4acd909b7ad4. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1184.762513] env[69982]: DEBUG nova.network.neutron [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.872169] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.294643} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.872486] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1184.873417] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d0b22f-71a4-43bf-8bf9-d2d0dfb89633 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.901153] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 36b7f89e-7552-40b9-ada4-01abfcea8310/36b7f89e-7552-40b9-ada4-01abfcea8310.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1184.901532] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f48845ef-bf10-49a4-9383-845cfe8f46bf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.923832] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1184.923832] env[69982]: value = "task-3865486" [ 1184.923832] env[69982]: _type = "Task" [ 1184.923832] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.934851] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865486, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.069168] env[69982]: DEBUG nova.compute.manager [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1185.069435] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.070459] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce4c5e6-4d52-454a-ae0a-b0421ccb5944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.079107] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.079430] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8884c18-4f5c-443a-a354-f999bd349ba6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.087495] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1185.087495] env[69982]: value = "task-3865487" [ 1185.087495] env[69982]: _type = "Task" [ 1185.087495] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.098795] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.129172] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499743} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.129478] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1185.129716] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1185.129988] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51ad0a36-7955-48ac-b55a-3e6e4db9e26c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.137132] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1185.137132] env[69982]: value = "task-3865488" [ 1185.137132] env[69982]: _type = "Task" [ 1185.137132] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.146989] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865488, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.211410] env[69982]: DEBUG nova.scheduler.client.report [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1185.266188] env[69982]: DEBUG oslo_concurrency.lockutils [req-63d0ea1d-0b25-4939-9e94-9bc0113679e5 req-9a5f499c-6d29-44bf-aaca-1793526aa831 service nova] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.350469] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a8f09f-6ff0-4b02-8615-dffadbb164c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.371784] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1185.434234] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865486, 'name': ReconfigVM_Task, 'duration_secs': 0.329369} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.434490] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 36b7f89e-7552-40b9-ada4-01abfcea8310/36b7f89e-7552-40b9-ada4-01abfcea8310.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1185.435163] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9dc8f054-0735-4e4d-8282-bceedadd7604 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.442036] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1185.442036] env[69982]: value = "task-3865489" [ 1185.442036] env[69982]: _type = "Task" [ 1185.442036] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.453690] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865489, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.598529] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865487, 'name': PowerOffVM_Task, 'duration_secs': 0.305681} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.598808] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1185.598979] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.599249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc731f05-707e-4606-ae08-e915f99aea0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.649240] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074193} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.649387] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1185.650206] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b14ff824-6858-42c2-aaa9-80998148d1dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.673040] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1185.674556] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81b91d30-7b52-422f-93f9-57a4d62a540b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.689157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1185.689393] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1185.689867] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Deleting the datastore file [datastore1] 245c6ca2-4dbf-46b5-9b08-e08db224d09f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1185.690462] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31f58313-d124-44e3-9418-72ec95185965 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.697552] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for the task: (returnval){ [ 1185.697552] env[69982]: value = "task-3865491" [ 1185.697552] env[69982]: _type = "Task" [ 1185.697552] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.699248] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1185.699248] env[69982]: value = "task-3865492" [ 1185.699248] env[69982]: _type = "Task" [ 1185.699248] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.711325] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.714642] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865492, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.719817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.809s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.722733] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.484s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.724106] env[69982]: INFO nova.compute.claims [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1185.742118] env[69982]: INFO nova.scheduler.client.report [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Deleted allocations for instance 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a [ 1185.879058] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1185.879505] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d834e6dd-5e5c-41b2-9ca6-9fc7af7eb811 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.888330] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1185.888330] env[69982]: value = "task-3865493" [ 1185.888330] env[69982]: _type = "Task" [ 1185.888330] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.898345] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865493, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.952460] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865489, 'name': Rename_Task, 'duration_secs': 0.139742} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.952685] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1185.952969] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97096bb9-af4e-4752-bef1-267f451a5ade {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.961098] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1185.961098] env[69982]: value = "task-3865494" [ 1185.961098] env[69982]: _type = "Task" [ 1185.961098] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.969372] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865494, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.213400] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865492, 'name': ReconfigVM_Task, 'duration_secs': 0.294018} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.216832] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1186.217526] env[69982]: DEBUG oslo_vmware.api [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Task: {'id': task-3865491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170208} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.217730] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc3a1591-4d46-4600-bb8e-1269bddda8fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.219440] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.219624] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.219801] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.219973] env[69982]: INFO nova.compute.manager [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1186.220221] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1186.220495] env[69982]: DEBUG nova.compute.manager [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1186.220650] env[69982]: DEBUG nova.network.neutron [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1186.230150] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1186.230150] env[69982]: value = "task-3865495" [ 1186.230150] env[69982]: _type = "Task" [ 1186.230150] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.240170] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865495, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.252091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1118e797-ef42-4dfd-b024-52e10620a974 tempest-ServersAdminNegativeTestJSON-1880926754 tempest-ServersAdminNegativeTestJSON-1880926754-project-member] Lock "6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.784s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.399448] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865493, 'name': PowerOffVM_Task, 'duration_secs': 0.187135} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.399768] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.399966] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1186.477141] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865494, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.633800] env[69982]: DEBUG nova.compute.manager [req-14879da5-71d5-4eed-8552-b28320540848 req-eca3da6f-697f-4e15-b8a5-6a5beb208aa7 service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Received event network-vif-deleted-f7df84f7-0f9d-4a14-8754-7df70a42ac3f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1186.634040] env[69982]: INFO nova.compute.manager [req-14879da5-71d5-4eed-8552-b28320540848 req-eca3da6f-697f-4e15-b8a5-6a5beb208aa7 service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Neutron deleted interface f7df84f7-0f9d-4a14-8754-7df70a42ac3f; detaching it from the instance and deleting it from the info cache [ 1186.634224] env[69982]: DEBUG nova.network.neutron [req-14879da5-71d5-4eed-8552-b28320540848 req-eca3da6f-697f-4e15-b8a5-6a5beb208aa7 service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.743451] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865495, 'name': Rename_Task, 'duration_secs': 0.136765} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.743763] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1186.744026] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c310284-e8a7-493d-9c98-8d9f9de2aba8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.750960] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1186.750960] env[69982]: value = "task-3865496" [ 1186.750960] env[69982]: _type = "Task" [ 1186.750960] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.761846] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865496, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.906702] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies 
{{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1186.908239] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1186.908912] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1186.913932] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-99a7fbca-598f-4579-85cf-fbf0409431ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.926070] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b5e5bf-69ce-4d07-b9a5-5115ec58d60e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.937365] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bbe5693-71ab-4f28-ad9b-f5b9b1677209 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.941531] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1186.941531] env[69982]: value = "task-3865497" [ 1186.941531] env[69982]: _type = "Task" [ 1186.941531] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.981609] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43952796-49c3-4792-bff0-cc856f2f1bb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.988224] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.993804] env[69982]: DEBUG oslo_vmware.api [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865494, 'name': PowerOnVM_Task, 'duration_secs': 0.520996} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.996107] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1186.996335] env[69982]: INFO nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Took 7.60 seconds to spawn the instance on the hypervisor. [ 1186.996526] env[69982]: DEBUG nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1186.997384] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe24bf27-7f6d-4d28-9943-ec5966d3b288 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.000894] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b09651-6115-44f7-a82e-ef9f2965aef2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.020065] env[69982]: DEBUG nova.compute.provider_tree [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1187.113162] env[69982]: DEBUG nova.network.neutron [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.122340] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.122600] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.137625] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6153164b-22c0-4243-b7bb-f87326597b44 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.148337] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc78e5f-0e3b-481b-b4ae-1bdfb6fd67d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.182997] env[69982]: DEBUG nova.compute.manager [req-14879da5-71d5-4eed-8552-b28320540848 req-eca3da6f-697f-4e15-b8a5-6a5beb208aa7 service nova] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Detach interface failed, port_id=f7df84f7-0f9d-4a14-8754-7df70a42ac3f, reason: Instance 245c6ca2-4dbf-46b5-9b08-e08db224d09f could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1187.262266] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865496, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.453224] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865497, 'name': ReconfigVM_Task, 'duration_secs': 0.190828} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.453669] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1187.527130] env[69982]: DEBUG nova.scheduler.client.report [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1187.533312] env[69982]: INFO nova.compute.manager [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Took 21.71 seconds to build instance. [ 1187.617326] env[69982]: INFO nova.compute.manager [-] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Took 1.40 seconds to deallocate network for instance. 
[ 1187.625643] env[69982]: DEBUG nova.compute.utils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1187.762649] env[69982]: DEBUG oslo_vmware.api [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865496, 'name': PowerOnVM_Task, 'duration_secs': 0.728784} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.762888] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1187.766019] env[69982]: INFO nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Took 6.03 seconds to spawn the instance on the hypervisor. [ 1187.766019] env[69982]: DEBUG nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1187.766019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1591ec53-977b-4616-9d78-c1d8cf4b22bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.961886] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1187.963079] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.963404] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1187.963776] env[69982]: DEBUG nova.virt.hardware [None 
req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.964068] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1187.964204] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1187.964423] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1187.964590] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1187.964817] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1187.964999] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1187.965193] env[69982]: DEBUG nova.virt.hardware [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1187.970987] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1187.971307] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a16831a-f36e-44d4-99a7-98a4deed4701 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.992287] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 
tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1187.992287] env[69982]: value = "task-3865498" [ 1187.992287] env[69982]: _type = "Task" [ 1187.992287] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.003105] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865498, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.034804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.034936] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1188.037723] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.857s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.039233] env[69982]: INFO nova.compute.claims [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1188.041991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ad800f0d-c597-4ed8-9dfc-6e1273fb5bc7 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.233s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.124741] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.127947] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.285930] env[69982]: INFO nova.compute.manager [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Took 21.57 seconds to build instance. [ 1188.503780] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865498, 'name': ReconfigVM_Task, 'duration_secs': 0.348339} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.504326] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1188.505888] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e640bac4-5092-428b-bbd0-e362a0361c3f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.532752] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085/volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.533568] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bde56c2-7055-47e9-8dfa-3a542cab5e37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.549936] env[69982]: DEBUG nova.compute.utils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1188.553801] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1188.554203] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1188.565134] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1188.565134] env[69982]: value = "task-3865499" [ 1188.565134] env[69982]: _type = "Task" [ 1188.565134] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.576576] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865499, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.682462] env[69982]: DEBUG nova.policy [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c736b9b5674d47fcb03602e4eaea8cd3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7036b34d089a4ca0b779e9ab9b86cc77', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1188.789866] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e6ecf52-b806-46c2-8dc4-003b6ebf64e4 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.086s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.942035] env[69982]: DEBUG nova.compute.manager [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-changed-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1188.942160] env[69982]: DEBUG nova.compute.manager [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing instance network info cache due to event network-changed-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1188.942380] env[69982]: DEBUG oslo_concurrency.lockutils [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.942621] env[69982]: DEBUG oslo_concurrency.lockutils [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.942663] env[69982]: DEBUG nova.network.neutron [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing network info cache for port 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1189.058896] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1189.083033] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865499, 'name': ReconfigVM_Task, 'duration_secs': 0.350317} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.083228] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085/volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.083687] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1189.221919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.222211] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" acquired by 
"nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.222633] env[69982]: INFO nova.compute.manager [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Attaching volume 2dfcb6d8-aab9-4474-86ca-8a2566cef584 to /dev/sdb [ 1189.225600] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Successfully created port: 2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1189.240850] env[69982]: DEBUG nova.compute.manager [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Received event network-changed-98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.242026] env[69982]: DEBUG nova.compute.manager [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Refreshing instance network info cache due to event network-changed-98b299b9-3425-43e1-95bf-4acd909b7ad4. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1189.242026] env[69982]: DEBUG oslo_concurrency.lockutils [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.242026] env[69982]: DEBUG oslo_concurrency.lockutils [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.242026] env[69982]: DEBUG nova.network.neutron [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Refreshing network info cache for port 98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1189.268318] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241d09b9-cdd0-4a85-a57c-9e69b7bb8d92 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.286044] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e40edda-6230-4858-81b7-8d1db24e6f8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.302492] env[69982]: DEBUG nova.virt.block_device [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating existing volume attachment record: a2f9524f-7b41-4366-a3fd-7d7c4176ec8b {{(pid=69982) 
_volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1189.472651] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6e4ac3-8590-40e3-839f-371f527627c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.481173] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7069b54-8efa-441c-ac69-d349e82568ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.520612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf826904-b481-43f6-b91a-1931bf5cb34a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.530236] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5413c0-b5b7-4e35-a396-cd514a4c1652 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.546273] env[69982]: DEBUG nova.compute.provider_tree [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1189.592059] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1cf07c-fc9b-4376-96c7-f6068e191835 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.619598] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df48a2f4-d195-4588-ae36-57fdfff6e8cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.645251] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1190.053788] env[69982]: DEBUG nova.scheduler.client.report [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.075528] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Start spawning the instance on the 
hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1190.112896] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1190.113219] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1190.113410] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1190.113648] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1190.113862] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1190.114080] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1190.114418] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1190.114570] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1190.114810] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1190.115221] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1190.115221] env[69982]: DEBUG nova.virt.hardware [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1190.116774] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe83ffc-52b5-42f0-8813-fbf4cb028068 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.127010] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d5ee9a-c39d-471d-b0fb-5156d699a23e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.181371] env[69982]: DEBUG nova.network.neutron [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updated VIF entry in instance network info cache for port 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1190.181777] env[69982]: DEBUG nova.network.neutron [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.242500] env[69982]: DEBUG nova.network.neutron [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] [instance: 
54b91e61-1302-40e6-b928-fcca31cd9b3a] Updated VIF entry in instance network info cache for port 98b299b9-3425-43e1-95bf-4acd909b7ad4. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1190.242970] env[69982]: DEBUG nova.network.neutron [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.560833] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.523s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.561397] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1190.564172] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.770s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.564302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.564381] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1190.564702] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.440s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.564928] env[69982]: DEBUG nova.objects.instance [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lazy-loading 'resources' on Instance uuid 245c6ca2-4dbf-46b5-9b08-e08db224d09f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1190.566626] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5919a7-06d2-41a7-839b-7936477d7456 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.576367] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29db7a4a-c49b-40b7-a8ab-70fbe6d76d2a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.592962] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b677c041-6280-4335-ba0b-e1c961b9c5e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.601319] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d94b7b8-aa17-4628-b38d-e84c1ca063ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.636630] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179745MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1190.636803] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.686567] env[69982]: DEBUG oslo_concurrency.lockutils [req-3aaa7f0a-e72a-4b4d-b919-a7f74a4907c4 req-dd20229b-d9d1-4cd5-90fb-b2db3d617dd3 service nova] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.746634] env[69982]: DEBUG oslo_concurrency.lockutils [req-4affe2de-66bd-42ac-9bde-061c9340a114 req-8e86d384-5bcf-40df-a573-763ad578207f service nova] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.955217] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Successfully updated port: 2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1190.973906] env[69982]: DEBUG nova.compute.manager [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Received event network-vif-plugged-2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.974206] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Acquiring lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.974357] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.974522] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.974715] env[69982]: DEBUG nova.compute.manager [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] No waiting events found dispatching network-vif-plugged-2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1190.974907] env[69982]: WARNING nova.compute.manager [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Received unexpected event network-vif-plugged-2d1c1373-bca8-422f-8fb6-d622ea4afdd1 for instance with vm_state building and task_state spawning. 
[ 1190.975090] env[69982]: DEBUG nova.compute.manager [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Received event network-changed-2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.975247] env[69982]: DEBUG nova.compute.manager [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Refreshing instance network info cache due to event network-changed-2d1c1373-bca8-422f-8fb6-d622ea4afdd1. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1190.975427] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Acquiring lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.975563] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Acquired lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.975805] env[69982]: DEBUG nova.network.neutron [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Refreshing network info cache for port 2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1191.068237] env[69982]: DEBUG nova.compute.utils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.069670] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1191.069784] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1191.137927] env[69982]: DEBUG nova.policy [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fec33fa84b60450d95e4cb53b5aac0f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7372e00e1966430da6131e02f199ba14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1191.253817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.253817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.317171] env[69982]: DEBUG nova.network.neutron [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Port ed266e98-9354-46dd-a173-6c25f605f719 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1191.328432] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef97d368-00d4-4c63-82d1-6af9577cb89e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.338623] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d12de6e-fefe-4fee-a534-990e58842c61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.370706] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c69386c8-3e12-4196-af10-c5caa6fb6573 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.379170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdec7b74-5e5d-4a9d-b46a-5e24c89caa4b 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.395985] env[69982]: DEBUG nova.compute.provider_tree [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1191.461033] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.507018] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Successfully created port: f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1191.513338] env[69982]: DEBUG nova.network.neutron [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1191.573375] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1191.605269] env[69982]: DEBUG nova.network.neutron [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.755866] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1191.900969] env[69982]: DEBUG nova.scheduler.client.report [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.108270] env[69982]: DEBUG oslo_concurrency.lockutils [req-f2f8141f-7a31-4b1d-b023-9d6b40f89385 req-03fd67ea-d362-4c44-a056-61819e4b85ce service nova] Releasing lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.108817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.108817] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1192.279666] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.339348] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.339502] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.339714] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.407813] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.410282] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 1.773s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.430163] env[69982]: INFO nova.scheduler.client.report [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Deleted allocations for instance 245c6ca2-4dbf-46b5-9b08-e08db224d09f [ 1192.585049] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1192.612440] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1192.612533] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1192.612661] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1192.612844] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 
tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1192.612990] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1192.613162] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1192.613375] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1192.613529] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1192.613758] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1192.614013] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1192.614215] env[69982]: DEBUG nova.virt.hardware [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1192.616863] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283b8fb3-3e33-4b7c-96e4-d922d49d17e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.626067] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91fb98d-8d2b-4a29-b867-efc50740780d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.647170] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1192.807172] env[69982]: DEBUG nova.network.neutron [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Updating instance_info_cache with network_info: [{"id": "2d1c1373-bca8-422f-8fb6-d622ea4afdd1", "address": "fa:16:3e:96:58:6f", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d1c1373-bc", "ovs_interfaceid": "2d1c1373-bca8-422f-8fb6-d622ea4afdd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.938389] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79cff135-f779-479f-8d6b-6205f15ef875 tempest-ServerGroupTestJSON-2030788215 tempest-ServerGroupTestJSON-2030788215-project-member] Lock "245c6ca2-4dbf-46b5-9b08-e08db224d09f" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 8.376s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.005941] env[69982]: DEBUG nova.compute.manager [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.006180] env[69982]: DEBUG oslo_concurrency.lockutils [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.006394] env[69982]: DEBUG oslo_concurrency.lockutils [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.006558] env[69982]: DEBUG oslo_concurrency.lockutils [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" 
:: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.006727] env[69982]: DEBUG nova.compute.manager [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] No waiting events found dispatching network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.006894] env[69982]: WARNING nova.compute.manager [req-1c65d311-c415-423b-8f6d-a0ae239bbd44 req-5166ebee-c506-4bcd-b8ab-d22948d6b24d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received unexpected event network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 for instance with vm_state building and task_state spawning. [ 1193.040164] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Successfully updated port: f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1193.310287] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "refresh_cache-6341394e-2ea2-4d77-b818-6d3bf5a32e97" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.310642] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance network_info: |[{"id": "2d1c1373-bca8-422f-8fb6-d622ea4afdd1", "address": "fa:16:3e:96:58:6f", "network": {"id": "0f1f2712-ed4f-493a-8d34-17064527ae82", "bridge": "br-int", "label": "tempest-ServersTestJSON-1747614601-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7036b34d089a4ca0b779e9ab9b86cc77", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d1c1373-bc", "ovs_interfaceid": "2d1c1373-bca8-422f-8fb6-d622ea4afdd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1193.311103] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:58:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d1c1373-bca8-422f-8fb6-d622ea4afdd1', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.319687] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.320613] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.321234] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ccc63723-20b2-42d1-b43b-b903793662cb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.349610] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.349610] env[69982]: value = "task-3865504" [ 1193.349610] env[69982]: _type = "Task" [ 1193.349610] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.362663] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865504, 'name': CreateVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.420249] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.420455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.420636] env[69982]: DEBUG nova.network.neutron [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1193.423144] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Applying migration context for instance 9815a4f9-3827-4e83-b897-18edadcac55b as it has an incoming, in-progress migration 883348c9-d966-48bb-a09d-bb8cdbfbe687. 
Migration status is post-migrating {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1193.424535] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating resource usage from migration 883348c9-d966-48bb-a09d-bb8cdbfbe687 [ 1193.453842] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454053] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454214] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454323] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8089e191-85df-46cd-8a6b-415bfd5d6748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454446] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 191d4433-cae3-48af-9c83-fa67499ad49c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454563] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ce24e165-230a-44bb-ae46-d1479e71585a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454710] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454847] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 54b91e61-1302-40e6-b928-fcca31cd9b3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.454980] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 883348c9-d966-48bb-a09d-bb8cdbfbe687 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1193.455119] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9815a4f9-3827-4e83-b897-18edadcac55b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.455253] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6341394e-2ea2-4d77-b818-6d3bf5a32e97 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.455347] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1193.544567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.544753] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.544950] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1193.860192] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865504, 'name': CreateVM_Task, 'duration_secs': 0.324584} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.860369] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1193.861076] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.861250] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.861762] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1193.862085] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a2e6ace-9955-4000-9d63-7202d4beebea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.867288] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1193.867288] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523b6c32-155c-e23f-e741-bf5f2194c4f0" [ 1193.867288] env[69982]: _type = "Task" [ 1193.867288] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.875819] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523b6c32-155c-e23f-e741-bf5f2194c4f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.958503] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7d84344b-cc66-4d9a-b5b4-4fd26a75648e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1193.958789] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1193.958938] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1194.105655] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1194.196164] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2a8f9f-e6fa-4854-9eeb-72bdf94c2dc8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.205694] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020dc07a-3234-4647-a168-658b6b67476f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.249055] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d4ad1c-1b81-40a7-87dc-504e1e76b958 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.263027] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc193f9-a2be-401d-846d-4d87e91d7b85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.278967] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.305226] env[69982]: DEBUG nova.network.neutron [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.308579] env[69982]: DEBUG nova.network.neutron [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.364031] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1194.364031] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768097', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'name': 'volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8089e191-85df-46cd-8a6b-415bfd5d6748', 'attached_at': '', 'detached_at': '', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'serial': '2dfcb6d8-aab9-4474-86ca-8a2566cef584'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1194.364031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ef93ec-c7fe-43a2-b141-4fcfc7577d08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.379833] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523b6c32-155c-e23f-e741-bf5f2194c4f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010708} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.393275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.393275] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.393275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.393275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.393275] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.394665] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2565c5de-938f-409c-b537-05ba6eb02806 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.398225] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0cd68ea-83c6-4df1-85f3-deb3cb191354 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.438028] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584/volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1194.438259] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3e0e488-ef1b-405a-89e2-3772f6d5f799 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.456847] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.456847] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1194.457122] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77347aeb-d245-44d4-ab0a-a80884c76d67 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.465947] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1194.465947] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52093097-aeb4-668e-7956-ff68628398cb" [ 1194.465947] env[69982]: _type = "Task" [ 1194.465947] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.467703] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1194.467703] env[69982]: value = "task-3865505" [ 1194.467703] env[69982]: _type = "Task" [ 1194.467703] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.482035] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52093097-aeb4-668e-7956-ff68628398cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.485695] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865505, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.782781] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1194.806852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.807110] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance network_info: |[{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1194.807533] env[69982]: DEBUG nova.virt.vmwareapi.vmops 
[None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:5c:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f973e93a-f258-4ccd-a732-c323a3202bb3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1194.816024] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating folder: Project (7372e00e1966430da6131e02f199ba14). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1194.817079] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.821714] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d86f3bb5-4a33-4141-9d3f-8b64afb5b7fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.837202] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created folder: Project (7372e00e1966430da6131e02f199ba14) in parent group-v767796. [ 1194.837613] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating folder: Instances. Parent ref: group-v768099. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1194.837725] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5b90fe3-df2a-4676-bb4b-7538bf8ac89f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.853089] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created folder: Instances in parent group-v768099. [ 1194.853378] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1194.853846] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1194.854111] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c6bd1c26-5086-4d65-86c4-c438cad0d377 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.882807] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1194.882807] env[69982]: value = "task-3865508" [ 1194.882807] env[69982]: _type = "Task" [ 1194.882807] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.895200] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865508, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.982712] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865505, 'name': ReconfigVM_Task, 'duration_secs': 0.444517} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.987204] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584/volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.992548] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52093097-aeb4-668e-7956-ff68628398cb, 'name': SearchDatastore_Task, 'duration_secs': 0.017178} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.992844] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d788c103-5ab2-4b24-bb94-34125133332e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.004673] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d72db00f-8935-415e-a318-3a969c4c1032 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.011500] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1195.011500] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5235bedf-0649-e87d-36de-40f8b64106a1" [ 1195.011500] env[69982]: _type = "Task" [ 1195.011500] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.016958] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1195.016958] env[69982]: value = "task-3865509" [ 1195.016958] env[69982]: _type = "Task" [ 1195.016958] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.025620] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5235bedf-0649-e87d-36de-40f8b64106a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.033078] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865509, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.050327] env[69982]: DEBUG nova.compute.manager [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.050534] env[69982]: DEBUG nova.compute.manager [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing instance network info cache due to event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1195.050761] env[69982]: DEBUG oslo_concurrency.lockutils [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.051908] env[69982]: DEBUG oslo_concurrency.lockutils [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.051908] env[69982]: DEBUG nova.network.neutron [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1195.288432] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1195.288718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.878s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.289011] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.010s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.290580] env[69982]: INFO nova.compute.claims [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1195.293156] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.293311] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1195.332504] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db5fe9e-b0bf-4bbc-b008-d2c78cf66caa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.343177] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb3f782-4c00-4054-9087-076976410a49 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.408361] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865508, 'name': CreateVM_Task, 'duration_secs': 0.355389} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.408361] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1195.408893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.409096] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.409625] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1195.410066] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d9d927-64f7-4289-960c-a6ba9ddf809f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.423459] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1195.423459] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524b3a04-ac44-baf2-893d-2b562dfc6242" [ 1195.423459] env[69982]: _type = "Task" [ 1195.423459] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.441657] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524b3a04-ac44-baf2-893d-2b562dfc6242, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.442291] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.442792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1195.443241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.526820] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5235bedf-0649-e87d-36de-40f8b64106a1, 'name': SearchDatastore_Task, 'duration_secs': 0.011828} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.528037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.528037] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6341394e-2ea2-4d77-b818-6d3bf5a32e97/6341394e-2ea2-4d77-b818-6d3bf5a32e97.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.528037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.528276] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1195.528423] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca5639a9-254c-40e9-b8c5-c7cc88584d42 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.534019] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-27fd18b5-9705-4bda-91dd-854b5ae9af84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.535968] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865509, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.542398] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1195.542398] env[69982]: value = "task-3865510" [ 1195.542398] env[69982]: _type = "Task" [ 1195.542398] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.547726] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1195.548163] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1195.549549] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50e1208d-fb99-40f1-805a-203f2f3e76b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.558129] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.562958] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1195.562958] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bd890d-91af-6983-bc26-e38c3fba5a89" [ 1195.562958] env[69982]: _type = "Task" [ 1195.562958] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.572749] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bd890d-91af-6983-bc26-e38c3fba5a89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.821847] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] There are 64 instances to clean {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1195.821847] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 245c6ca2-4dbf-46b5-9b08-e08db224d09f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1195.840613] env[69982]: DEBUG nova.network.neutron [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updated VIF entry in instance network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1195.841266] env[69982]: DEBUG nova.network.neutron [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.031142] env[69982]: DEBUG oslo_vmware.api [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865509, 'name': ReconfigVM_Task, 'duration_secs': 1.007697} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.031638] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768097', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'name': 'volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8089e191-85df-46cd-8a6b-415bfd5d6748', 'attached_at': '', 'detached_at': '', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'serial': '2dfcb6d8-aab9-4474-86ca-8a2566cef584'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1196.054080] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.073637] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bd890d-91af-6983-bc26-e38c3fba5a89, 'name': SearchDatastore_Task, 'duration_secs': 0.01356} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.074467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde7c9b0-fbb8-4b7d-ac79-dfdc67fa91ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.081165] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1196.081165] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52efbf3b-fb1f-40f5-2ab2-1121b6458c12" [ 1196.081165] env[69982]: _type = "Task" [ 1196.081165] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.090297] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52efbf3b-fb1f-40f5-2ab2-1121b6458c12, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.327544] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: e5f7aebd-22a8-47fd-9b73-09791aecae56] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1196.344857] env[69982]: DEBUG oslo_concurrency.lockutils [req-334289f4-e2ec-496a-bf2e-23a43e7d37be req-35dbe16c-9ce7-4bb4-abef-e50425453d64 service nova] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.465692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad388ec1-3853-4c51-b88f-e7cfdbebff20 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.495281] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd782955-7938-45d6-a222-c2f7b9344b21 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.508336] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1196.554484] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9897e0-a13d-48f4-8526-ac428eb03098 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.562127] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865510, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.572152] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1c00e5-be19-4d1e-83db-397021634a34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.612764] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe6d240-3fa1-4f18-95fd-54791e6314de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.623848] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52efbf3b-fb1f-40f5-2ab2-1121b6458c12, 'name': SearchDatastore_Task, 'duration_secs': 0.014116} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.626046] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.626285] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1196.626583] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d2e7b7c-fd95-4aa9-9562-fb0ea8929068 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.629449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6e2ed2-aa61-4e29-ab76-3e0aeaab8b91 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.638354] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1196.638354] env[69982]: value = "task-3865511" [ 1196.638354] env[69982]: _type = "Task" [ 1196.638354] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.646238] env[69982]: DEBUG nova.compute.provider_tree [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1196.655937] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865511, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.831484] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4fd5ea57-dc28-4d56-abbc-53a3c71394bf] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1197.019311] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1197.019648] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d91def28-c9ee-4862-8244-08575996dd1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.028417] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1197.028417] env[69982]: value = "task-3865512" [ 1197.028417] env[69982]: _type = "Task" [ 1197.028417] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.040527] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865512, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.060961] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865510, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.095868} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.061239] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 6341394e-2ea2-4d77-b818-6d3bf5a32e97/6341394e-2ea2-4d77-b818-6d3bf5a32e97.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1197.061463] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1197.061806] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7bc7c13a-82cd-4daf-9c8e-d5fad995d37d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.072292] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1197.072292] env[69982]: value = "task-3865513" [ 1197.072292] env[69982]: _type = "Task" [ 1197.072292] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.086009] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.109527] env[69982]: DEBUG nova.objects.instance [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'flavor' on Instance uuid 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.151956] env[69982]: DEBUG nova.scheduler.client.report [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.159272] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865511, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464465} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.159574] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1197.159796] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1197.160065] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d6eae43-e576-4e7b-9bb1-7d6b6ac831e0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.167394] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1197.167394] env[69982]: value = "task-3865514" [ 1197.167394] env[69982]: _type = "Task" [ 1197.167394] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.177015] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865514, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.334741] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 1315a51d-6d0f-4e6c-9ae1-6af96b74104f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1197.540029] env[69982]: DEBUG oslo_vmware.api [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865512, 'name': PowerOnVM_Task, 'duration_secs': 0.455993} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.540338] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1197.540501] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b652caa3-437b-4081-97bb-c91a42f3b3ab tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance '9815a4f9-3827-4e83-b897-18edadcac55b' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.583315] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.264946} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.583524] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.584486] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c39157-af67-4b9f-b1ab-c1b1caec5198 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.608023] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 6341394e-2ea2-4d77-b818-6d3bf5a32e97/6341394e-2ea2-4d77-b818-6d3bf5a32e97.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.609174] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48d8a081-dff5-458d-9c8f-3f2d7f49f767 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.627036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cf2aaf8f-e84d-4e88-936a-65bdafb6c05c tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.404s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.632896] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1197.632896] env[69982]: value = "task-3865515" [ 1197.632896] env[69982]: _type = "Task" [ 1197.632896] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.643311] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865515, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.660626] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.371s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.661217] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1197.678284] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865514, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075678} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.678564] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.679412] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f895281-ca64-4916-bcad-a1a56aa2557e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.708930] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.710273] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da508196-9687-4ca8-8531-057e51aad907 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.733110] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1197.733110] env[69982]: value = "task-3865516" [ 1197.733110] env[69982]: _type = "Task" [ 1197.733110] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.742120] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865516, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.838432] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 28518353-0bff-460f-8384-f0376280917d] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1198.144853] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865515, 'name': ReconfigVM_Task, 'duration_secs': 0.298517} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.145350] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 6341394e-2ea2-4d77-b818-6d3bf5a32e97/6341394e-2ea2-4d77-b818-6d3bf5a32e97.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.146247] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ae9abcc-2dd4-4123-9ed9-38fa15695c4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.154632] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1198.154632] env[69982]: value = "task-3865517" [ 1198.154632] env[69982]: _type = "Task" [ 1198.154632] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.166229] env[69982]: DEBUG nova.compute.utils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1198.167576] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865517, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.168059] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1198.168230] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1198.221269] env[69982]: DEBUG nova.policy [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '020a36f3aea34b1a8dcc379df67518d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d4602e8ada243b0a9bf86ea9677f14b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1198.245146] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.341780] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: fea9d096-ee82-4ad1-a799-ef7aaf5026a2] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1198.458861] env[69982]: DEBUG nova.compute.manager [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1198.528969] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully created port: 42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1198.663962] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865517, 'name': Rename_Task, 'duration_secs': 0.1535} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.664366] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.664727] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-99e63906-c303-4b2f-bf70-7d124e5c0612 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.669128] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1198.673105] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1198.673105] env[69982]: value = "task-3865518" [ 1198.673105] env[69982]: _type = "Task" [ 1198.673105] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.681606] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865518, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.744089] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865516, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.787485] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully created port: 389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1198.845021] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: f8107863-4eb1-4b8e-937e-30dc1e276f33] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1198.981255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.981548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.022592] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully created port: 3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1199.188085] env[69982]: DEBUG oslo_vmware.api [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865518, 'name': PowerOnVM_Task, 'duration_secs': 0.48332} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.188536] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.190331] env[69982]: INFO nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Took 9.11 seconds to spawn the instance on the hypervisor. 
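[editor's note] Most of the entries above are oslo.vmware task polling: each vCenter call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, Rename_Task) hands back a task reference that wait_for_task polls until vCenter reports success, logging "progress is N%" on every poll and "completed successfully" with the duration at the end. The sketch below is a minimal, illustrative reimplementation of that loop under assumed names — get_task_info and its .state/.progress/.error fields are hypothetical stand-ins, not the oslo.vmware property-collector API the driver actually uses.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        # Poll a vCenter-style task until it finishes.
        # get_task_info is a hypothetical callable returning an object with
        # .state ('running' | 'success' | 'error'), .progress (0-100) and .error.
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                # Corresponds to the "... completed successfully." log lines.
                return info
            if info.state == 'error':
                raise TaskFailed(info.error)
            # Corresponds to the "Task: {...} progress is N%." entries
            # emitted once per poll interval.
            print(f"Task {task_ref}: progress is {info.progress}%")
            time.sleep(poll_interval)

[end editor's note]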
[ 1199.190331] env[69982]: DEBUG nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1199.190331] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051c4ba7-5828-41d1-b7c2-8b5c0d437232 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.244791] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865516, 'name': ReconfigVM_Task, 'duration_secs': 1.045651} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.245138] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.245740] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ea955a8-34cf-4c36-a85a-d79dae1d046a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.254416] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1199.254416] env[69982]: value = "task-3865519" [ 1199.254416] env[69982]: _type = "Task" [ 1199.254416] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.264210] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865519, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.347780] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d10aaf26-7100-4313-bd57-d2cfefb16e3f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1199.487853] env[69982]: INFO nova.compute.claims [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1199.683247] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1199.712816] env[69982]: INFO nova.compute.manager [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Took 23.49 seconds to build instance. [ 1199.719985] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1199.720261] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1199.720432] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1199.720619] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1199.720779] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1199.720952] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1199.721191] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1199.721370] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 
tempest-ServersTestMultiNic-425894379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1199.721554] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1199.721717] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1199.721914] env[69982]: DEBUG nova.virt.hardware [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1199.722885] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170209ac-79ac-4a73-a71e-0fc1bfb317d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.732973] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716a3df4-b186-48d0-99b2-2562dcdbe62a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.765157] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865519, 'name': Rename_Task, 'duration_secs': 0.189818} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.765446] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.765713] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e2de208-c0bc-4270-9879-41d479346b58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.772871] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1199.772871] env[69982]: value = "task-3865520" [ 1199.772871] env[69982]: _type = "Task" [ 1199.772871] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.786089] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865520, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.851693] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6d0ca4a4-cdbc-41bc-a3ba-a5a54475005a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1199.929573] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.929573] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.929573] env[69982]: DEBUG nova.compute.manager [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Going to confirm migration 8 {{(pid=69982) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1199.992952] env[69982]: INFO nova.compute.resource_tracker [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating resource usage from migration 5cd5a310-8734-406a-9714-25adece293f3 [ 1200.185231] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0363254-3f95-494b-9a4f-6ed939254c43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.194592] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc359389-b291-419d-952c-df642c1b56d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.227621] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6798b344-8c67-4fa0-a6ff-926ec56e4c68 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.013s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.229216] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50169ead-4c15-4bce-9892-420ce62fbb30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.237807] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45859956-767c-4514-8f57-4bf3ef66f0f0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1200.255686] env[69982]: DEBUG nova.compute.provider_tree [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.285168] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.356073] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 642b42eb-eeef-401c-8feb-032d783c645a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1200.467392] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.467715] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquired lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.468017] env[69982]: DEBUG nova.network.neutron [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1200.468352] env[69982]: DEBUG nova.objects.instance [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'info_cache' on Instance uuid 9815a4f9-3827-4e83-b897-18edadcac55b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.660525] env[69982]: DEBUG oslo_concurrency.lockutils [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.660792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.660982] env[69982]: DEBUG nova.compute.manager [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 
tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.662031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ceca6a-8999-416a-a84f-8f21935d3bb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.670836] env[69982]: DEBUG nova.compute.manager [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1200.671469] env[69982]: DEBUG nova.objects.instance [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'flavor' on Instance uuid 6341394e-2ea2-4d77-b818-6d3bf5a32e97 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.760244] env[69982]: DEBUG nova.scheduler.client.report [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.785563] env[69982]: DEBUG oslo_vmware.api [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865520, 'name': PowerOnVM_Task, 'duration_secs': 0.608062} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.785830] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1200.786045] env[69982]: INFO nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Took 8.20 seconds to spawn the instance on the hypervisor. 
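The entries above trace the usual oslo.vmware task pattern: PowerOnVM_Task is invoked, the API layer polls the task ("progress is 0%" ... "progress is 89%" ... "completed successfully"), and only then does the driver report how long the spawn took. A minimal polling sketch, assuming a hypothetical get_task_info() callable that stands in for the vSphere TaskInfo lookup (this is an illustration, not the oslo.vmware implementation):

import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vSphere-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning an object with
    .state ('running', 'success' or 'error') and .progress (0-100).
    """
    while True:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed")
        # Mirrors the "progress is N%" debug lines logged while waiting.
        print("progress is %d%%" % info.progress)
        time.sleep(poll_interval)

# Example with a fake task that succeeds on the third poll.
class _Info:
    def __init__(self, state, progress):
        self.state, self.progress = state, progress

_seq = iter([_Info("running", 0), _Info("running", 89), _Info("success", 100)])
wait_for_task(lambda: next(_seq), poll_interval=0)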
[ 1200.786228] env[69982]: DEBUG nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.787337] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266a0b93-d0ff-4bc1-809f-f8c3ab47a84e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.853736] env[69982]: DEBUG nova.compute.manager [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-plugged-42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.854047] env[69982]: DEBUG oslo_concurrency.lockutils [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.854261] env[69982]: DEBUG oslo_concurrency.lockutils [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.854429] env[69982]: DEBUG oslo_concurrency.lockutils [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.854624] env[69982]: DEBUG nova.compute.manager [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] No waiting events found dispatching network-vif-plugged-42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1200.854855] env[69982]: WARNING nova.compute.manager [req-ec0020dc-d79b-4073-91ba-cfe5970c494c req-987f16d3-0ad7-4c9a-94da-01db072b9138 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received unexpected event network-vif-plugged-42cd3ec7-258a-40b0-84a1-a13b02fc0670 for instance with vm_state building and task_state spawning. 
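The network-vif-plugged entries above show the external-event path: the manager takes the per-instance "<uuid>-events" lock, tries to pop a registered waiter for the event, and logs the "Received unexpected event" warning when nobody is waiting (here the instance is still building, so nothing has subscribed yet). A toy sketch of that dispatch pattern using plain threading primitives, not Nova's InstanceEvents class:

import threading

class InstanceEventsSketch:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        # Called by the code path that expects the event (e.g. while plugging a VIF).
        with self._lock:
            ev = threading.Event()
            self._waiters[(instance_uuid, event_name)] = ev
            return ev

    def pop_instance_event(self, instance_uuid, event_name):
        # Runs under the per-instance lock, like the "-events" lock in the log.
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

def dispatch_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print("WARNING: received unexpected event %s for instance %s"
              % (event_name, instance_uuid))
    else:
        waiter.set()  # wake whoever called prepare_for_event() and is waiting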
[ 1200.860208] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: cf08cf32-f3d4-494f-a51b-a40616e76429] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1200.951589] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully updated port: 42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.265162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.283s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.265416] env[69982]: INFO nova.compute.manager [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Migrating [ 1201.303963] env[69982]: INFO nova.compute.manager [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Took 24.14 seconds to build instance. [ 1201.364092] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 834d66a7-4626-4d85-8e6c-db4a8ec39be0] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1201.680847] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.680847] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-77fd4859-8d7a-4fb3-895e-6707e7c95cfb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.688755] env[69982]: DEBUG oslo_vmware.api [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1201.688755] env[69982]: value = "task-3865521" [ 1201.688755] env[69982]: _type = "Task" [ 1201.688755] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.701638] env[69982]: DEBUG oslo_vmware.api [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865521, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.765115] env[69982]: DEBUG nova.network.neutron [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [{"id": "ed266e98-9354-46dd-a173-6c25f605f719", "address": "fa:16:3e:a7:e1:be", "network": {"id": "71cadfcf-ea5a-4008-a668-6f95ac9a625b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1979268080-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.159", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "07f7b975ecb449a290e2ae6582e07016", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped266e98-93", "ovs_interfaceid": "ed266e98-9354-46dd-a173-6c25f605f719", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.784306] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.784549] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.784683] env[69982]: DEBUG nova.network.neutron [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1201.806931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b19b24d0-dc90-466a-931f-95abb2d1b2d2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.651s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.867209] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: dffd4365-d2b6-4201-be46-a823399bb2a6] Instance has 
had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1202.203651] env[69982]: DEBUG oslo_vmware.api [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865521, 'name': PowerOffVM_Task, 'duration_secs': 0.293756} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.203967] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1202.204183] env[69982]: DEBUG nova.compute.manager [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.205050] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77e0a36-715d-4dfa-b1c9-a43ee27c10da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.268893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Releasing lock "refresh_cache-9815a4f9-3827-4e83-b897-18edadcac55b" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.269268] env[69982]: DEBUG nova.objects.instance [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'migration_context' on Instance uuid 9815a4f9-3827-4e83-b897-18edadcac55b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1202.371331] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 68c212df-c7a0-45c5-a00c-f94c6a9a9bb3] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1202.505705] env[69982]: DEBUG nova.network.neutron [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.718804] env[69982]: DEBUG oslo_concurrency.lockutils [None req-318a0d74-32da-4426-a6b3-63820c34cf6d tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.058s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.772778] env[69982]: DEBUG nova.objects.base [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Object Instance<9815a4f9-3827-4e83-b897-18edadcac55b> lazy-loaded attributes: info_cache,migration_context {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1202.774297] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ce6ab0-f612-4cfb-b3e7-325350c15aca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.796297] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01d14638-e071-4c1a-b8f3-51e098857758 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.803116] env[69982]: DEBUG oslo_vmware.api [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1202.803116] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb60c3-7f87-f3a3-fa73-6378219eb8b9" [ 1202.803116] env[69982]: _type = "Task" [ 1202.803116] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.812789] env[69982]: DEBUG oslo_vmware.api [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb60c3-7f87-f3a3-fa73-6378219eb8b9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.875305] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 0b973aa8-6d25-4de9-8a6b-7bb9f65671fb] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1202.886506] env[69982]: DEBUG nova.compute.manager [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-changed-42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1202.886721] env[69982]: DEBUG nova.compute.manager [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing instance network info cache due to event network-changed-42cd3ec7-258a-40b0-84a1-a13b02fc0670. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1202.886967] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Acquiring lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.887140] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Acquired lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.887317] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing network info cache for port 42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1203.009263] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.282191] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully updated port: 389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1203.314962] env[69982]: DEBUG oslo_vmware.api [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bb60c3-7f87-f3a3-fa73-6378219eb8b9, 'name': SearchDatastore_Task, 'duration_secs': 0.01039} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.315302] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.315557] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.340388] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.340642] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.340852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.341078] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.341264] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.343430] env[69982]: INFO nova.compute.manager [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Terminating instance [ 1203.379485] env[69982]: DEBUG nova.compute.manager [None 
req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 71a93f5f-86a9-44e4-ae2c-c77fe37d7b5a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1203.428946] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1203.517357] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.850452] env[69982]: DEBUG nova.compute.manager [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1203.850874] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1203.851728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b01bea-41de-4a95-8796-923383def164 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.862832] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.863242] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fac21fec-6b6c-4104-bec4-8bae3fb7640c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.882995] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5bbc7b58-3e8e-495f-911a-072d282e48a9] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1203.940591] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.940817] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.941009] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore2] 6341394e-2ea2-4d77-b818-6d3bf5a32e97 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.941362] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c5baa72-8510-4529-81a1-53f4bb6b0a7a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.949373] env[69982]: DEBUG oslo_vmware.api [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1203.949373] env[69982]: value = "task-3865523" [ 1203.949373] env[69982]: _type = "Task" [ 1203.949373] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.962657] env[69982]: DEBUG oslo_vmware.api [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865523, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.022615] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Releasing lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.023031] env[69982]: DEBUG nova.compute.manager [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.023217] env[69982]: DEBUG nova.compute.manager [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing instance network info cache due to event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1204.023490] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.023750] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.024036] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1204.074534] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf9c991d-3244-49ee-bd1f-0e0fc3da571c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.083726] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee8e19d-d7b7-4c0e-a5f0-9cac0b3dd44a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.122019] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7055faef-980c-436d-a3d9-b32e89090de9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.133264] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b16af0b-71f6-471a-9c2b-053485e82cd4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.149786] env[69982]: DEBUG nova.compute.provider_tree [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.390619] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 00f8efe0-28ad-4d95-b931-a31de0c03bd7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1204.460378] env[69982]: DEBUG oslo_vmware.api [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865523, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156763} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.460588] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.460771] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.460944] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.461129] env[69982]: INFO nova.compute.manager [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1204.461373] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.461561] env[69982]: DEBUG nova.compute.manager [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1204.461657] env[69982]: DEBUG nova.network.neutron [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1204.534783] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72861719-eba0-4a1d-8f41-b61ee7ea1620 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.558920] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1204.653308] env[69982]: DEBUG nova.scheduler.client.report [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1204.884261] env[69982]: DEBUG nova.compute.manager [req-51f41309-23d2-48e5-b54e-2ec4e01c8aee req-9e53eb3f-90a7-4206-85a7-9d6b12a1ccc1 service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Received event network-vif-deleted-2d1c1373-bca8-422f-8fb6-d622ea4afdd1 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.884815] env[69982]: INFO nova.compute.manager [req-51f41309-23d2-48e5-b54e-2ec4e01c8aee req-9e53eb3f-90a7-4206-85a7-9d6b12a1ccc1 service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Neutron deleted interface 2d1c1373-bca8-422f-8fb6-d622ea4afdd1; detaching it from the instance and deleting it from the info cache [ 1204.885323] env[69982]: DEBUG nova.network.neutron [req-51f41309-23d2-48e5-b54e-2ec4e01c8aee req-9e53eb3f-90a7-4206-85a7-9d6b12a1ccc1 service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.895265] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9c0d0f4f-9e88-4e67-99d9-d957652587cd] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1204.906949] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updated VIF entry in instance network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1204.907377] env[69982]: DEBUG nova.network.neutron [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1204.921824] env[69982]: DEBUG nova.compute.manager [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-plugged-389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.922423] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.922777] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1204.923560] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1204.923560] env[69982]: DEBUG nova.compute.manager [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] No waiting events found dispatching 
network-vif-plugged-389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1204.923687] env[69982]: WARNING nova.compute.manager [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received unexpected event network-vif-plugged-389211f6-52f2-45f7-bbf1-f3de5dcce67e for instance with vm_state building and task_state spawning. [ 1204.924075] env[69982]: DEBUG nova.compute.manager [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-changed-389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1204.924412] env[69982]: DEBUG nova.compute.manager [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing instance network info cache due to event network-changed-389211f6-52f2-45f7-bbf1-f3de5dcce67e. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1204.924732] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Acquiring lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1204.925013] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Acquired lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1204.925332] env[69982]: DEBUG nova.network.neutron [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing network info cache for port 389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1205.064774] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1205.065154] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1c19128-baa2-4222-a97c-746c6c0f29d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.075673] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1205.075673] env[69982]: value = "task-3865524" [ 1205.075673] env[69982]: _type = "Task" [ 1205.075673] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.084385] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.364844] env[69982]: DEBUG nova.network.neutron [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.390067] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c28ebdd-6e25-4269-8879-4ee7c45e0849 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.400659] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d6dc0e-589a-432f-b03f-d04131dcb5db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.412585] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Successfully updated port: 3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1205.413879] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a8d4c7c6-2ec8-4bbe-a909-8bb5f93f43ae] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1205.415937] env[69982]: DEBUG oslo_concurrency.lockutils [req-ef30e60c-1a86-4164-97ff-4ca9655302e1 req-f7dc03a9-f4d2-4685-aeab-e803213b8736 service nova] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1205.442266] env[69982]: DEBUG nova.compute.manager [req-51f41309-23d2-48e5-b54e-2ec4e01c8aee req-9e53eb3f-90a7-4206-85a7-9d6b12a1ccc1 service nova] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Detach interface failed, port_id=2d1c1373-bca8-422f-8fb6-d622ea4afdd1, reason: Instance 6341394e-2ea2-4d77-b818-6d3bf5a32e97 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1205.477227] env[69982]: DEBUG nova.network.neutron [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1205.552756] env[69982]: DEBUG nova.network.neutron [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.586589] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865524, 'name': PowerOffVM_Task, 'duration_secs': 0.193571} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.586859] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1205.587063] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1205.664423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.349s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.866269] env[69982]: INFO nova.compute.manager [-] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Took 1.40 seconds to deallocate network for instance. 
[ 1205.917984] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 187cbba3-3700-4c40-a514-28e08ea13426] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1205.919818] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.055853] env[69982]: DEBUG oslo_concurrency.lockutils [req-914b8e0d-6a35-4ec6-a70f-4a142be45dab req-b016fe83-cfbd-4078-89ba-b1a98774f8be service nova] Releasing lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.056212] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.056391] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1206.094193] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1206.094351] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1206.094481] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1206.094754] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1206.094908] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1206.095468] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1206.095468] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1206.095468] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1206.095649] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1206.095803] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1206.095983] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1206.101743] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56e5e6a9-9a6a-4142-a115-ce5a5a7188e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.120263] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1206.120263] env[69982]: value = "task-3865525" [ 1206.120263] env[69982]: _type = "Task" [ 1206.120263] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.129374] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865525, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.232867] env[69982]: INFO nova.scheduler.client.report [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocation for migration 883348c9-d966-48bb-a09d-bb8cdbfbe687 [ 1206.373420] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.373926] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.374018] env[69982]: DEBUG nova.objects.instance [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid 6341394e-2ea2-4d77-b818-6d3bf5a32e97 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.422651] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: fc46eca6-6a60-477c-98de-a2e1f6c7e88b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1206.584152] env[69982]: INFO nova.compute.manager [None req-49cd981b-e331-4680-8492-b2496959e4f0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Get console output [ 1206.584521] env[69982]: WARNING nova.virt.vmwareapi.driver [None req-49cd981b-e331-4680-8492-b2496959e4f0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] The console log is missing. Check your VSPC configuration [ 1206.596670] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1206.630457] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865525, 'name': ReconfigVM_Task, 'duration_secs': 0.232522} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.630766] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1206.738705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-cdb3c269-96fe-4019-8004-83d636b451a0 tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.809s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.926812] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 3edfba43-55e9-4180-bb03-ce008af3a7d7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1206.957478] env[69982]: DEBUG nova.compute.manager [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-plugged-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1206.957478] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.957478] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.957478] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1206.957478] env[69982]: DEBUG nova.compute.manager [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] No waiting events found dispatching network-vif-plugged-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1206.957647] env[69982]: WARNING nova.compute.manager [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received unexpected event network-vif-plugged-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 for instance with vm_state building and task_state 
spawning. [ 1206.957768] env[69982]: DEBUG nova.compute.manager [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-changed-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1206.958225] env[69982]: DEBUG nova.compute.manager [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing instance network info cache due to event network-changed-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1206.958225] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Acquiring lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.020481] env[69982]: DEBUG nova.network.neutron [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [{"id": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "address": "fa:16:3e:46:fe:f9", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42cd3ec7-25", "ovs_interfaceid": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "389211f6-52f2-45f7-bbf1-f3de5dcce67e", "address": "fa:16:3e:3f:2e:51", "network": {"id": "48b600ec-cdb5-432b-84be-0d44e4a7fbea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-421615284", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap389211f6-52", "ovs_interfaceid": 
"389211f6-52f2-45f7-bbf1-f3de5dcce67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "address": "fa:16:3e:0d:97:7f", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3511b9c7-b2", "ovs_interfaceid": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.056600] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c19d296-fd43-470a-83ab-7db6f0793186 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.064136] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eb4962-de53-45ec-acd3-701da3fd0213 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.094629] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7565bc67-454c-418a-92a3-ba392325550a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.102775] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94291df-adc0-4870-bb1f-8e6cc9980689 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.117728] env[69982]: DEBUG nova.compute.provider_tree [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.137220] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1207.137467] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1207.137623] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1207.137802] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1207.137943] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1207.138107] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1207.138314] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1207.138467] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1207.141090] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1207.141521] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1207.141671] env[69982]: DEBUG nova.virt.hardware [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1207.147304] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1207.148974] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d21a6a4-4906-4928-8b4c-8ed5eb1bc9ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.164888] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.165157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.170441] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1207.170441] env[69982]: value = "task-3865526" [ 1207.170441] env[69982]: _type = "Task" [ 1207.170441] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.180114] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865526, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.430293] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 475a403c-bfdb-4239-b0d4-3baca441603f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1207.523197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.523626] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance network_info: |[{"id": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "address": "fa:16:3e:46:fe:f9", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42cd3ec7-25", "ovs_interfaceid": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "389211f6-52f2-45f7-bbf1-f3de5dcce67e", "address": "fa:16:3e:3f:2e:51", "network": {"id": "48b600ec-cdb5-432b-84be-0d44e4a7fbea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-421615284", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap389211f6-52", "ovs_interfaceid": "389211f6-52f2-45f7-bbf1-f3de5dcce67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "address": "fa:16:3e:0d:97:7f", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3511b9c7-b2", "ovs_interfaceid": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1207.523988] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Acquired lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.524202] env[69982]: DEBUG nova.network.neutron [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Refreshing network info cache for port 3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1207.525501] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:fe:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '42cd3ec7-258a-40b0-84a1-a13b02fc0670', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:2e:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1b8b991-feba-44e6-900c-6486e7e122f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '389211f6-52f2-45f7-bbf1-f3de5dcce67e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:97:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ecc4615-18f0-4324-8e16-5e5d513325e2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1207.539174] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Creating folder: Project (4d4602e8ada243b0a9bf86ea9677f14b). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.542717] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-496a57ac-c4ce-40ff-8bd4-267f3b558469 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.554987] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Created folder: Project (4d4602e8ada243b0a9bf86ea9677f14b) in parent group-v767796. [ 1207.555234] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Creating folder: Instances. Parent ref: group-v768102. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1207.555477] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9d9f175-2c4d-447f-b457-788b60a7cb87 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.565836] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Created folder: Instances in parent group-v768102. [ 1207.566083] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1207.566318] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1207.566522] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a769eca-f60e-43d2-abc5-2ce220d2c0fe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.593654] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1207.593654] env[69982]: value = "task-3865529" [ 1207.593654] env[69982]: _type = "Task" [ 1207.593654] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.602291] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865529, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.624274] env[69982]: DEBUG nova.scheduler.client.report [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.669851] env[69982]: DEBUG nova.compute.utils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1207.683187] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865526, 'name': ReconfigVM_Task, 'duration_secs': 0.208728} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.684212] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1207.685112] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e244c339-7a9c-46d8-8acc-0b4b488c2633 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.713099] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1207.716591] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f03218b-0a15-4e27-ad4a-52751813be02 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.735543] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1207.735543] env[69982]: value = "task-3865530" [ 1207.735543] env[69982]: _type = "Task" [ 1207.735543] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.744390] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865530, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.795056] env[69982]: DEBUG nova.network.neutron [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updated VIF entry in instance network info cache for port 3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1207.795218] env[69982]: DEBUG nova.network.neutron [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [{"id": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "address": "fa:16:3e:46:fe:f9", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42cd3ec7-25", "ovs_interfaceid": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "389211f6-52f2-45f7-bbf1-f3de5dcce67e", "address": "fa:16:3e:3f:2e:51", "network": {"id": "48b600ec-cdb5-432b-84be-0d44e4a7fbea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-421615284", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.212", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1b8b991-feba-44e6-900c-6486e7e122f0", "external-id": "nsx-vlan-transportzone-429", "segmentation_id": 429, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap389211f6-52", "ovs_interfaceid": "389211f6-52f2-45f7-bbf1-f3de5dcce67e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "address": "fa:16:3e:0d:97:7f", "network": {"id": 
"ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3511b9c7-b2", "ovs_interfaceid": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1207.933564] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 719e6e11-ac2f-4b71-9a55-5c110f8aaba4] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1208.104675] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865529, 'name': CreateVM_Task, 'duration_secs': 0.433278} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.104857] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1208.105698] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.105868] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.106256] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1208.106512] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c96c33e-43f0-4d3f-921a-8656b3968815 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.112391] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 
tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1208.112391] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521c70ee-3d66-933d-2f87-7d20f8570bda" [ 1208.112391] env[69982]: _type = "Task" [ 1208.112391] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.121508] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521c70ee-3d66-933d-2f87-7d20f8570bda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.129459] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.156363] env[69982]: INFO nova.scheduler.client.report [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance 6341394e-2ea2-4d77-b818-6d3bf5a32e97 [ 1208.177600] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.247676] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865530, 'name': ReconfigVM_Task, 'duration_secs': 0.329044} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.249030] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1208.249030] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.298068] env[69982]: DEBUG oslo_concurrency.lockutils [req-9cc87218-92d3-404d-ab0b-5c2c97567a50 req-e21b91b8-f82d-46de-9a48-93b773bcf733 service nova] Releasing lock "refresh_cache-7d84344b-cc66-4d9a-b5b4-4fd26a75648e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.437714] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ab14774e-c834-41e9-bb3f-87722b51070e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1208.622788] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521c70ee-3d66-933d-2f87-7d20f8570bda, 'name': SearchDatastore_Task, 'duration_secs': 0.011371} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.624029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1208.624029] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1208.624029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.624029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.624029] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1208.625197] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d18815f2-02f9-4c75-a1e5-5fe81762dbc6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.632807] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1208.632985] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1208.633730] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73df70c9-1773-45bb-b472-c4f44c9f273c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.639128] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1208.639128] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aad463-f3d9-406f-1f82-3554e0b2c956" [ 1208.639128] env[69982]: _type = "Task" [ 1208.639128] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.647480] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aad463-f3d9-406f-1f82-3554e0b2c956, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.664860] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f61de0d5-ab74-4a72-a2bf-b6c5acad1167 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "6341394e-2ea2-4d77-b818-6d3bf5a32e97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.324s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.756289] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f6b680-320a-466c-8e8b-8cbb9a848e2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.781212] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e67eae-6bab-46b9-94b2-1bf1fcdba122 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.804358] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1208.941688] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8518f3c8-738d-468a-9f57-de50e4e67108] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1209.150931] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52aad463-f3d9-406f-1f82-3554e0b2c956, 'name': SearchDatastore_Task, 'duration_secs': 0.010569} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.152035] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35316055-e17f-418d-8bc7-610a8dc13ecd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.157806] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1209.157806] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214279f-c1fb-145d-718a-c79da42aea12" [ 1209.157806] env[69982]: _type = "Task" [ 1209.157806] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.165962] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214279f-c1fb-145d-718a-c79da42aea12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.226391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.226632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.226904] env[69982]: INFO nova.compute.manager [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Attaching volume 3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e to /dev/sdb [ 1209.262983] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84c62f88-4311-4fb9-a26f-5719216d3ef9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.270964] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb48f82-bc07-4a3d-a9fe-a1ee4947df15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.284861] env[69982]: DEBUG nova.virt.block_device [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating existing volume attachment record: 28d89260-9f67-437e-a85d-791255b559c7 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1209.445223] env[69982]: DEBUG 
nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 56ad2f3f-c24f-446e-8df7-09fde60ba6cd] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1209.479508] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.479794] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.480008] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.480201] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.480366] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.482433] env[69982]: INFO nova.compute.manager [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Terminating instance [ 1209.669796] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5214279f-c1fb-145d-718a-c79da42aea12, 'name': SearchDatastore_Task, 'duration_secs': 0.011047} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.670181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.670526] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 7d84344b-cc66-4d9a-b5b4-4fd26a75648e/7d84344b-cc66-4d9a-b5b4-4fd26a75648e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1209.670863] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ce200ae-f733-4a0f-8607-24bb6316a918 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.679682] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1209.679682] env[69982]: value = "task-3865532" [ 1209.679682] env[69982]: _type = "Task" [ 1209.679682] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.689469] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865532, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.949355] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5100234f-ea02-40bf-b883-fa9a159c7637] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1209.986163] env[69982]: DEBUG nova.compute.manager [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1209.986452] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1209.987385] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853fe1d3-3ee8-48bd-b354-6bb9e9fae63e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.997404] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1209.997810] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4287b5ff-9064-494e-9169-b725479bd0a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.006060] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1210.006060] env[69982]: value = "task-3865533" [ 1210.006060] env[69982]: _type = "Task" [ 1210.006060] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.016734] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.190581] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865532, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459498} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.190743] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 7d84344b-cc66-4d9a-b5b4-4fd26a75648e/7d84344b-cc66-4d9a-b5b4-4fd26a75648e.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1210.191476] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1210.191476] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05a8e791-b075-49c1-9ea5-5500bccf0c57 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.199983] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1210.199983] env[69982]: value = "task-3865534" [ 1210.199983] env[69982]: _type = "Task" [ 1210.199983] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.212068] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865534, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.446231] env[69982]: DEBUG nova.network.neutron [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Port 0d321236-4177-49d0-afe0-f203e679fb0a binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1210.453041] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ad43c35a-69bc-4c84-8869-cfde6f516b9b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1210.516517] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865533, 'name': PowerOffVM_Task, 'duration_secs': 0.290156} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.516777] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.516962] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.517245] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-429a4f52-4b36-438a-b177-2b38a6c424ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.613122] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.613514] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.613819] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleting the datastore file [datastore1] b5ad55cc-9010-46be-bfd0-28fa1607f1c3 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.614246] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57a21a9a-6e2f-45e4-ba5a-6a621c0caf73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.622351] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for the task: (returnval){ [ 1210.622351] env[69982]: value = "task-3865536" [ 1210.622351] env[69982]: _type = "Task" [ 1210.622351] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.634072] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.710566] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865534, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073303} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.710842] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1210.711650] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f4cae8-b007-4422-a147-b031dde87280 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.739255] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 7d84344b-cc66-4d9a-b5b4-4fd26a75648e/7d84344b-cc66-4d9a-b5b4-4fd26a75648e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.739679] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60cb2113-59f2-4598-b979-b7c867a188f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.760253] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1210.760253] env[69982]: value = "task-3865537" [ 1210.760253] env[69982]: _type = "Task" [ 1210.760253] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.769700] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865537, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.956484] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6d390a12-bfb4-4d91-9e83-a81560a08e1a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1211.133019] env[69982]: DEBUG oslo_vmware.api [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Task: {'id': task-3865536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401791} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.133296] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1211.133491] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1211.133739] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1211.133944] env[69982]: INFO nova.compute.manager [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1211.134236] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1211.134435] env[69982]: DEBUG nova.compute.manager [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1211.134530] env[69982]: DEBUG nova.network.neutron [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1211.271545] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865537, 'name': ReconfigVM_Task, 'duration_secs': 0.339109} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.271819] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 7d84344b-cc66-4d9a-b5b4-4fd26a75648e/7d84344b-cc66-4d9a-b5b4-4fd26a75648e.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.272577] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b2656a2-5454-4f52-acf2-06e6ab4c8e6c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.280482] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1211.280482] env[69982]: value = "task-3865538" [ 1211.280482] env[69982]: _type = "Task" [ 1211.280482] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.289152] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865538, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.376704] env[69982]: DEBUG nova.compute.manager [req-65114795-a502-4ced-b6bd-23128ce3112d req-6400e8c1-eb4b-45e9-ba32-77de0fc72dec service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Received event network-vif-deleted-8614e294-a2cc-4a82-ab33-bbc5def50f56 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.376704] env[69982]: INFO nova.compute.manager [req-65114795-a502-4ced-b6bd-23128ce3112d req-6400e8c1-eb4b-45e9-ba32-77de0fc72dec service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Neutron deleted interface 8614e294-a2cc-4a82-ab33-bbc5def50f56; detaching it from the instance and deleting it from the info cache [ 1211.376704] env[69982]: DEBUG nova.network.neutron [req-65114795-a502-4ced-b6bd-23128ce3112d req-6400e8c1-eb4b-45e9-ba32-77de0fc72dec service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.463354] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 5b79fc38-ace3-4f94-8d1c-b77912f44a1d] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1211.472207] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.472411] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.472600] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.790689] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865538, 'name': Rename_Task, 'duration_secs': 0.150923} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.790975] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1211.791240] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-335c71c1-06df-4e4e-8379-c25b71434b4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.798499] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1211.798499] env[69982]: value = "task-3865540" [ 1211.798499] env[69982]: _type = "Task" [ 1211.798499] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.806757] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865540, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.859494] env[69982]: DEBUG nova.network.neutron [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.879338] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c76a6ace-8bb7-4204-999e-25384faacb2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.890238] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8850c0a2-46ab-4474-a56b-6ec7501ee310 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.924210] env[69982]: DEBUG nova.compute.manager [req-65114795-a502-4ced-b6bd-23128ce3112d req-6400e8c1-eb4b-45e9-ba32-77de0fc72dec service nova] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Detach interface failed, port_id=8614e294-a2cc-4a82-ab33-bbc5def50f56, reason: Instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1211.974995] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6c8f3f0f-e4eb-4a0e-a8e4-5def7bcd6f82] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1212.309335] env[69982]: DEBUG oslo_vmware.api [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865540, 'name': PowerOnVM_Task, 'duration_secs': 0.464681} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.309662] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1212.309834] env[69982]: INFO nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Took 12.63 seconds to spawn the instance on the hypervisor. [ 1212.310031] env[69982]: DEBUG nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1212.310843] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b39193b-f288-4d59-80ab-9b016dc2716d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.362382] env[69982]: INFO nova.compute.manager [-] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Took 1.23 seconds to deallocate network for instance. 
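The DEBUG traffic on either side of this point repeats a single pattern: the vmwareapi driver invokes a vCenter method that returns a Task managed object (SearchDatastore_Task, CopyVirtualDisk_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task), and oslo.vmware then polls that task, producing the "Waiting for the task", "progress is N%" and "completed successfully ... duration_secs" lines above. A minimal standalone sketch of that pattern using oslo.vmware's public session API follows; the vCenter host, the credentials, and the choice of the first VirtualMachine plus PowerOffVM_Task as the example task are illustrative assumptions, not values taken from this run.

from oslo_vmware import api, vim_util

# Assumed, placeholder connection details -- not the vCenter from this log.
session = api.VMwareAPISession(
    'vc.example.test',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)   # polling cadence behind the "progress is N%" lines

# Grab any VM managed-object reference; for illustration, simply the first one
# the property collector returns.
retrieve_result = session.invoke_api(
    vim_util, 'get_objects', session.vim, 'VirtualMachine', 1)
vm_ref = retrieve_result.objects[0].obj

# Invoking a *_Task method returns immediately with a Task moref ...
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

# ... and wait_for_task blocks, re-reading the task's state on each poll (the
# _poll_task DEBUG entries) until it reports success or raises on an error.
session.wait_for_task(task)

In the driver itself these calls are reached through Nova's VMwareAPISession wrappers, which is why every task in this log, whichever module starts it, funnels through wait_for_task and _poll_task in oslo_vmware/api.py.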
[ 1212.479809] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a9a14fa8-7f58-48f9-994d-b5063833a81b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1212.511658] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.511853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.512041] env[69982]: DEBUG nova.network.neutron [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1212.829242] env[69982]: INFO nova.compute.manager [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Took 20.57 seconds to build instance. [ 1212.869347] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.869767] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.870136] env[69982]: DEBUG nova.objects.instance [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lazy-loading 'resources' on Instance uuid b5ad55cc-9010-46be-bfd0-28fa1607f1c3 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.987019] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: bba6f430-5af5-4d8a-9cf4-082207c170a5] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1213.236302] env[69982]: DEBUG nova.network.neutron [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": 
"d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1213.331241] env[69982]: DEBUG oslo_concurrency.lockutils [None req-72f53846-019c-451d-b6f9-54f7c7aab14b tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.080s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.489027] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8358b105-7276-4292-804d-534f9fb1535e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1213.529760] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cd6042-51e0-40d1-b09b-9837ff380bde {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.537575] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4b58c5a-e3e0-43c8-aa7a-0852982c640f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.568063] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc744b2-d450-4c9f-bf84-275e7e5f20a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.576599] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c18d8fa-7d94-42aa-95eb-1339accd5611 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.592162] env[69982]: DEBUG nova.compute.provider_tree [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.625621] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.625853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.626077] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.626264] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.626527] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.628586] env[69982]: INFO nova.compute.manager [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Terminating instance [ 1213.739498] env[69982]: DEBUG oslo_concurrency.lockutils [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.832716] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1213.833037] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768105', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'name': 'volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '191d4433-cae3-48af-9c83-fa67499ad49c', 'attached_at': '', 'detached_at': '', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'serial': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1213.834081] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99535da4-0425-4b63-875a-ee586486a1b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.853508] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838c2017-72ae-4153-a1f4-d2c88728c448 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.879748] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e/volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.880061] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71618691-ada7-49e3-90ee-7470d3c269aa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.900138] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1213.900138] env[69982]: value = "task-3865541" [ 1213.900138] env[69982]: _type = "Task" [ 1213.900138] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.909778] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865541, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.992274] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 10a4294b-39ce-4643-98b5-71ac283f05f5] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1214.095818] env[69982]: DEBUG nova.scheduler.client.report [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1214.133723] env[69982]: DEBUG nova.compute.manager [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1214.133931] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1214.134888] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a657c892-4f17-4973-85db-bcc3112abda3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.143410] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1214.143681] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f749e1de-e7a8-42f9-a3cb-6f0b65d69f46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.151800] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1214.151800] env[69982]: value = "task-3865542" [ 1214.151800] env[69982]: _type = "Task" [ 1214.151800] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.161220] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865542, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.249014] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6207b0-90a9-4c5f-b1a0-da8a8502a822 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.257283] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9260d3-8ce0-49cc-8d50-eea4494add3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.412372] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865541, 'name': ReconfigVM_Task, 'duration_secs': 0.415795} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.412782] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfigured VM instance instance-0000006c to attach disk [datastore1] volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e/volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.418028] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b2dd362-567a-47a0-bacc-9b2e69168be0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.434711] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1214.434711] env[69982]: value = "task-3865543" [ 1214.434711] env[69982]: _type = "Task" [ 1214.434711] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.444624] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865543, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.495923] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 96ad6ba4-549c-4ea2-ac4d-c7a33c03bcfa] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1214.602177] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.732s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.620517] env[69982]: INFO nova.scheduler.client.report [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Deleted allocations for instance b5ad55cc-9010-46be-bfd0-28fa1607f1c3 [ 1214.662593] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865542, 'name': PowerOffVM_Task, 'duration_secs': 0.196211} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.662883] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.663100] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.663360] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a047cc6c-95bf-4b24-a2e1-2e1f6938b398 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.947239] env[69982]: DEBUG oslo_vmware.api [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865543, 'name': ReconfigVM_Task, 'duration_secs': 0.150214} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.947584] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768105', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'name': 'volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '191d4433-cae3-48af-9c83-fa67499ad49c', 'attached_at': '', 'detached_at': '', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'serial': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1214.998977] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 331f218a-ad6b-4417-b56d-83113e0c92cb] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1215.128433] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e2b5f882-2760-4a61-b9c6-93639e8f7fd6 tempest-ServersTestJSON-915624429 tempest-ServersTestJSON-915624429-project-member] Lock "b5ad55cc-9010-46be-bfd0-28fa1607f1c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.649s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.366213] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5ac52b-b0ec-4ede-9af7-fe2f37272ccf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.389022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5759b06-7966-4eb7-8323-80e11f1dcadd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.397919] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1215.502167] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ff2c680a-211a-44ad-b00d-1037f1fcb856] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1215.904801] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1215.905027] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c3f030f-aa68-427f-9792-4fd7ae08631d {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.913265] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1215.913265] env[69982]: value = "task-3865545" [ 1215.913265] env[69982]: _type = "Task" [ 1215.913265] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.922952] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865545, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.986013] env[69982]: DEBUG nova.objects.instance [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid 191d4433-cae3-48af-9c83-fa67499ad49c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1216.005881] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 67613f71-a91e-4dae-8a6c-cd74c4821339] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1216.160826] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1216.161071] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1216.161253] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleting the datastore file [datastore1] 7d84344b-cc66-4d9a-b5b4-4fd26a75648e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1216.161551] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73b69446-70d2-41b2-a009-e02bb6a067f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.173038] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1216.173038] env[69982]: value = "task-3865546" [ 1216.173038] env[69982]: _type = "Task" [ 1216.173038] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.182757] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.352023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.352023] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.424456] env[69982]: DEBUG oslo_vmware.api [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865545, 'name': PowerOnVM_Task, 'duration_secs': 0.458975} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.424753] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1216.424943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-59e289f5-defb-4c3f-b914-adbb9a42b58a tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance '8089e191-85df-46cd-8a6b-415bfd5d6748' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1216.492031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fac3dce1-0627-43eb-a077-ef7a38370180 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.265s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.508358] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a3e3106d-b7df-49c8-9341-a843977aefe4] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1216.684104] env[69982]: DEBUG oslo_vmware.api [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865546, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.179342} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.684408] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1216.684686] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1216.684851] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1216.685059] env[69982]: INFO nova.compute.manager [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Took 2.55 seconds to destroy the instance on the hypervisor. [ 1216.685320] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1216.685509] env[69982]: DEBUG nova.compute.manager [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1216.685602] env[69982]: DEBUG nova.network.neutron [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1216.855585] env[69982]: DEBUG nova.compute.utils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1216.946258] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.946768] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.010910] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 69103bad-cb3f-4cd1-bfa1-c19b10395674] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1217.105894] env[69982]: DEBUG nova.compute.manager [req-d5980499-b31b-4a7a-b059-e21f158fa49e req-a8a272ef-2665-474c-acb2-2d492f459f6c service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-deleted-389211f6-52f2-45f7-bbf1-f3de5dcce67e {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.106314] env[69982]: INFO nova.compute.manager [req-d5980499-b31b-4a7a-b059-e21f158fa49e req-a8a272ef-2665-474c-acb2-2d492f459f6c service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Neutron deleted interface 389211f6-52f2-45f7-bbf1-f3de5dcce67e; detaching it from the instance and deleting it from the info cache [ 1217.107221] env[69982]: DEBUG nova.network.neutron [req-d5980499-b31b-4a7a-b059-e21f158fa49e req-a8a272ef-2665-474c-acb2-2d492f459f6c service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [{"id": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "address": "fa:16:3e:46:fe:f9", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42cd3ec7-25", "ovs_interfaceid": "42cd3ec7-258a-40b0-84a1-a13b02fc0670", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "address": "fa:16:3e:0d:97:7f", "network": {"id": "ed5fc841-09dd-4276-921a-ad95546ad39b", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-620845760", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.244", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ecc4615-18f0-4324-8e16-5e5d513325e2", "external-id": "nsx-vlan-transportzone-167", "segmentation_id": 167, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3511b9c7-b2", "ovs_interfaceid": "3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.358530] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.449834] env[69982]: INFO nova.compute.manager [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Detaching volume 3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e [ 1217.488124] env[69982]: INFO nova.virt.block_device [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Attempting to driver detach volume 3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e from mountpoint /dev/sdb [ 1217.488377] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1217.488568] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768105', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'name': 'volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '191d4433-cae3-48af-9c83-fa67499ad49c', 'attached_at': '', 'detached_at': '', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'serial': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1217.489452] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64ebb7f-3a5b-4d81-af9c-7d2d795a74eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.512637] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28bd362f-b3e2-462f-86d5-37bac9f466a4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.521050] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a61e3d25-9064-4f18-b7f1-0045b705571a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1217.522611] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5efa1a52-ee11-4ed3-a149-d55930acd5c5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.545961] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3645b5f-4c16-480f-b04c-80d128cd04ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.563138] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] The volume has not been displaced from its original location: [datastore1] volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e/volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1217.568494] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1217.569043] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a9d805e-290c-4e33-92d0-1c6a71895ec2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.587836] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1217.587836] env[69982]: value = "task-3865547" [ 1217.587836] env[69982]: _type = "Task" [ 1217.587836] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.596485] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865547, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.611764] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-409fc70c-613f-489d-bb76-87ec78b0402b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.621313] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9ccb8f9-b279-47f1-9600-533d3bfac225 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.656698] env[69982]: DEBUG nova.compute.manager [req-d5980499-b31b-4a7a-b059-e21f158fa49e req-a8a272ef-2665-474c-acb2-2d492f459f6c service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Detach interface failed, port_id=389211f6-52f2-45f7-bbf1-f3de5dcce67e, reason: Instance 7d84344b-cc66-4d9a-b5b4-4fd26a75648e could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1217.906123] env[69982]: DEBUG nova.network.neutron [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.026155] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d21659fd-015d-4f5b-b4b5-f38f550e0f00] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1218.098234] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865547, 'name': ReconfigVM_Task, 'duration_secs': 0.26878} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.098534] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1218.103262] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bdd7b19-b80d-472e-8f71-fad6e7a7bf50 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.118784] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1218.118784] env[69982]: value = "task-3865548" [ 1218.118784] env[69982]: _type = "Task" [ 1218.118784] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.127522] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865548, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.409358] env[69982]: INFO nova.compute.manager [-] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Took 1.72 seconds to deallocate network for instance. 
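[editor's note] The recurring "Waiting for the task: (returnval){ value = "task-..." }", "progress is N%", and "completed successfully" entries above are produced by oslo.vmware's wait_for_task/_poll_task helpers while vCenter tasks such as PowerOnVM_Task, ReconfigVM_Task and DeleteDatastoreFile_Task run. The snippet below is only an illustrative sketch of that polling pattern, not the oslo.vmware implementation; get_task_info is a hypothetical callable standing in for the SOAP property read, and the state names mirror what the log reports.

# Illustrative sketch of the polling loop behind the wait_for_task/_poll_task
# log entries -- NOT the oslo.vmware code. `get_task_info(task_ref)` is an
# assumed helper returning an object with .state ('queued' | 'running' |
# 'success' | 'error'), .progress and .error.
import time

def wait_for_task(task_ref, get_task_info, interval=0.5, timeout=300.0):
    """Poll a vCenter task reference until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info                     # e.g. "PowerOnVM_Task ... completed successfully"
        if info.state == 'error':
            raise RuntimeError(info.error)  # surface the vCenter fault to the caller
        # queued/running: this is where the "progress is 0%" style lines get logged
        time.sleep(interval)
    raise TimeoutError('task %s did not finish within %.0fs' % (task_ref, timeout))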
[ 1218.429659] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.429912] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.430147] env[69982]: INFO nova.compute.manager [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Attaching volume 6f459808-fc05-4a22-b8c6-83bd72fd1939 to /dev/sdb [ 1218.461254] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb921dd-a22c-4c21-b4cd-7fadcd903fca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.468925] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af66f131-ff81-4a96-b10f-e49e3794d990 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.483102] env[69982]: DEBUG nova.virt.block_device [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating existing volume attachment record: a4852fef-40c6-4828-aa67-e1e07ce2e3d3 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1218.486034] env[69982]: DEBUG nova.network.neutron [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Port 0d321236-4177-49d0-afe0-f203e679fb0a binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1218.486281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.486607] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.486687] env[69982]: DEBUG nova.network.neutron [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 
8089e191-85df-46cd-8a6b-415bfd5d6748] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1218.529026] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9b733e1e-0532-4d91-a460-6b1f1971f388] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1218.628975] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865548, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.915601] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.915961] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.916208] env[69982]: DEBUG nova.objects.instance [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lazy-loading 'resources' on Instance uuid 7d84344b-cc66-4d9a-b5b4-4fd26a75648e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.032342] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: b3aeaa98-724b-4563-aeaf-a089906eb0eb] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1219.130594] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865548, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.136052] env[69982]: DEBUG nova.compute.manager [req-80667f8d-3c17-4804-b6d5-826e3929ef60 req-1654dae0-9f94-4acc-83f1-634f5b1e3a0b service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-deleted-3511b9c7-b2fe-44fc-82f9-f6a6ec5d4d01 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1219.136375] env[69982]: DEBUG nova.compute.manager [req-80667f8d-3c17-4804-b6d5-826e3929ef60 req-1654dae0-9f94-4acc-83f1-634f5b1e3a0b service nova] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Received event network-vif-deleted-42cd3ec7-258a-40b0-84a1-a13b02fc0670 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1219.275529] env[69982]: DEBUG nova.network.neutron [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": "0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1219.538536] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 48dbc665-8286-4d5d-af4e-1a85d1742952] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1219.568686] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08bb14e-fb06-4617-9516-1785831b4516 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.577118] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e25ada-32b9-408e-9bf1-64d8cba00967 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.610729] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539da91a-66b7-4efa-ba65-b66ddba17f4d {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.619157] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1a4d60-0622-4ba2-b70d-3604aae7693f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.637914] env[69982]: DEBUG nova.compute.provider_tree [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1219.642573] env[69982]: DEBUG oslo_vmware.api [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865548, 'name': ReconfigVM_Task, 'duration_secs': 1.146714} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.643124] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768105', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'name': 'volume-3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '191d4433-cae3-48af-9c83-fa67499ad49c', 'attached_at': '', 'detached_at': '', 'volume_id': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e', 'serial': '3e8dbc03-2b8d-4197-a40f-9f3eda1eb33e'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1219.778747] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.042381] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 3f896859-5a4a-4a59-bee8-b116e291fbe7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1220.163169] env[69982]: ERROR nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [req-d59d0b9b-cffd-4949-9231-f764d21c541e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d59d0b9b-cffd-4949-9231-f764d21c541e"}]} [ 1220.182313] env[69982]: DEBUG nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1220.190570] env[69982]: DEBUG nova.objects.instance [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid 191d4433-cae3-48af-9c83-fa67499ad49c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.198742] env[69982]: DEBUG nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1220.198979] env[69982]: DEBUG nova.compute.provider_tree [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1220.210933] env[69982]: DEBUG nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1220.229585] env[69982]: DEBUG nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1220.282681] env[69982]: DEBUG nova.compute.manager [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69982) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1220.372742] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1962d248-6a68-4c7d-8855-8118422456c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.380615] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92148627-56ce-46f1-b277-02c47f5af591 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.412051] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3105f8-805c-41bc-bf2d-bb922a788a24 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.420125] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38860773-cbea-459a-a831-78ef0c7b6bba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.436458] env[69982]: DEBUG nova.compute.provider_tree [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1220.545927] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 570675a8-3ec0-4fe6-b123-d3901d56b8cf] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1220.974980] env[69982]: DEBUG nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 159 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1220.975346] env[69982]: DEBUG nova.compute.provider_tree [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 
tempest-ServersTestMultiNic-425894379-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 159 to 160 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1220.975600] env[69982]: DEBUG nova.compute.provider_tree [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1221.048882] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 43a8e7dd-843b-49f6-9edb-60c2b380e9c2] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1221.201042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-79dda459-3ca3-41f5-b820-ff2d8aa3faf3 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.253s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.380220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.481456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.565s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.484125] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.104s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.503422] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.503677] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.503909] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.504157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.504352] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.507279] env[69982]: INFO nova.compute.manager [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Terminating instance [ 1221.510229] env[69982]: INFO nova.scheduler.client.report [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleted allocations for instance 7d84344b-cc66-4d9a-b5b4-4fd26a75648e [ 1221.551898] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9dcaa045-83c6-4e74-881d-a85a1991dbe3] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1221.987034] env[69982]: DEBUG nova.objects.instance [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'migration_context' on Instance uuid 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.017859] env[69982]: DEBUG nova.compute.manager [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1222.018083] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1222.018563] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5eefdd9f-2433-40bf-bc2e-bda050b80d01 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "7d84344b-cc66-4d9a-b5b4-4fd26a75648e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.393s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.020071] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187cedb9-73f0-49d6-adb8-447bc51f2b61 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.028857] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1222.029100] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f428a814-7997-4801-9f37-1c4561ec8c0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.035755] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1222.035755] env[69982]: value = "task-3865553" [ 1222.035755] env[69982]: _type = "Task" [ 1222.035755] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.046422] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865553, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.055333] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d43e5e7a-577d-4fe9-aff7-9012adfbdb9a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1222.546722] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865553, 'name': PowerOffVM_Task, 'duration_secs': 0.220889} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.547381] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1222.547597] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1222.547869] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ba4c677a-8f02-47c6-8ad7-216b6df93fee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.558204] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: cd839916-6daf-4b31-941d-6305a585bfaa] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1222.611704] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1222.611950] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1222.612257] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleting the datastore file [datastore2] 191d4433-cae3-48af-9c83-fa67499ad49c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1222.615150] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-868d0c72-8ccc-4ad8-8480-8d3ba1d20f39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.622319] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1222.622319] env[69982]: value = "task-3865555" [ 1222.622319] env[69982]: _type = "Task" [ 1222.622319] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.631972] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865555, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.642885] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942658f9-484c-476f-b286-60984188c6da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.651709] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9a4161-9f45-4520-a064-5d8419e23b1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.683137] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7112f92f-d5fc-41bc-aaa0-7b9e05f98456 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.691792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af782bd4-1c7d-441f-9394-586396aa9798 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.708030] env[69982]: DEBUG nova.compute.provider_tree [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.027643] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1223.028044] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768107', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'name': 'volume-6f459808-fc05-4a22-b8c6-83bd72fd1939', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ce24e165-230a-44bb-ae46-d1479e71585a', 'attached_at': '', 'detached_at': '', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'serial': '6f459808-fc05-4a22-b8c6-83bd72fd1939'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1223.029092] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0cfe71-b8c0-4907-aa0d-1d992920e7da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.047587] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281f110d-ac62-4d28-9cb7-acc0ea573e2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.065994] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9a1de08e-3206-44cc-8d34-a5527faf9684] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1223.075425] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] volume-6f459808-fc05-4a22-b8c6-83bd72fd1939/volume-6f459808-fc05-4a22-b8c6-83bd72fd1939.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1223.075999] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee356d26-93c7-47b3-b496-2f63a00dc5cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.095865] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1223.095865] env[69982]: value = "task-3865556" [ 1223.095865] env[69982]: _type = "Task" [ 1223.095865] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.105214] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865556, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.131809] env[69982]: DEBUG oslo_vmware.api [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.132129] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1223.132261] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1223.132434] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1223.132607] env[69982]: INFO nova.compute.manager [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1223.132850] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1223.133050] env[69982]: DEBUG nova.compute.manager [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1223.133153] env[69982]: DEBUG nova.network.neutron [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1223.245352] env[69982]: DEBUG nova.scheduler.client.report [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1223.245640] env[69982]: DEBUG nova.compute.provider_tree [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 160 to 161 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1223.245752] env[69982]: DEBUG nova.compute.provider_tree [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1223.577969] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a4064177-051b-4ec8-a1fc-fa5d299add8b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1223.581668] env[69982]: DEBUG nova.compute.manager [req-18ee4643-f3c4-41b2-8fd4-997451c1982b req-9d188d01-d064-4e1a-835e-a651d30effd1 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Received event network-vif-deleted-4b426f54-9cfa-4f17-ac93-6cc3529b9b86 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1223.581858] env[69982]: INFO nova.compute.manager [req-18ee4643-f3c4-41b2-8fd4-997451c1982b req-9d188d01-d064-4e1a-835e-a651d30effd1 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Neutron deleted interface 4b426f54-9cfa-4f17-ac93-6cc3529b9b86; detaching it from the instance and deleting it from the info cache [ 1223.582057] env[69982]: DEBUG nova.network.neutron 
[req-18ee4643-f3c4-41b2-8fd4-997451c1982b req-9d188d01-d064-4e1a-835e-a651d30effd1 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.608118] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865556, 'name': ReconfigVM_Task, 'duration_secs': 0.381798} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.608410] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-6f459808-fc05-4a22-b8c6-83bd72fd1939/volume-6f459808-fc05-4a22-b8c6-83bd72fd1939.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.613795] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa364a5e-7428-4c79-ae6e-d410e15512e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.631483] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1223.631483] env[69982]: value = "task-3865557" [ 1223.631483] env[69982]: _type = "Task" [ 1223.631483] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.641164] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865557, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.058034] env[69982]: DEBUG nova.network.neutron [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.083723] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: d2684194-a688-4466-9852-1f4ff656f057] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1224.087363] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-11d7da10-2d59-4ee8-a6d2-2494da73c14f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.097309] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8640b4f-283a-4dd3-8dde-6ad0813a6847 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.132336] env[69982]: DEBUG nova.compute.manager [req-18ee4643-f3c4-41b2-8fd4-997451c1982b req-9d188d01-d064-4e1a-835e-a651d30effd1 service nova] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Detach interface failed, port_id=4b426f54-9cfa-4f17-ac93-6cc3529b9b86, reason: Instance 191d4433-cae3-48af-9c83-fa67499ad49c could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1224.141844] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.256860] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.773s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.560398] env[69982]: INFO nova.compute.manager [-] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Took 1.43 seconds to deallocate network for instance. [ 1224.588663] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9123b08c-d2ec-4c4d-bade-0acdae75640a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1224.643441] env[69982]: DEBUG oslo_vmware.api [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865557, 'name': ReconfigVM_Task, 'duration_secs': 0.99314} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.643707] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768107', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'name': 'volume-6f459808-fc05-4a22-b8c6-83bd72fd1939', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ce24e165-230a-44bb-ae46-d1479e71585a', 'attached_at': '', 'detached_at': '', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'serial': '6f459808-fc05-4a22-b8c6-83bd72fd1939'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1224.737150] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.737502] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.067277] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.067648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.067899] env[69982]: DEBUG nova.objects.instance [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'resources' on Instance uuid 191d4433-cae3-48af-9c83-fa67499ad49c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.092558] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8b812422-4ca6-4d2b-b6af-873fdb21fab6] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1225.240282] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 
tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1225.595974] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4597a0b8-2c04-4755-8e0d-e00e5cdaacd7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1225.691483] env[69982]: DEBUG nova.objects.instance [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1225.733615] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd57d48-0911-471b-9e1d-f00b9a87c2f3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.742598] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c45c9c-30b6-43f6-9fbf-2a06a4bae4c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.776000] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.776814] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f73880-dc64-49e1-af23-bec3f3736550 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.788083] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ac6bb1-9cc0-43f2-85fd-f3e8ba94b88a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.793038] env[69982]: INFO nova.compute.manager [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Swapping old allocation on dict_keys(['206a5498-2e79-46c1-a636-9488a05fb67d']) held by migration 5cd5a310-8734-406a-9714-25adece293f3 for instance [ 1225.806810] env[69982]: DEBUG nova.compute.provider_tree [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1225.820905] env[69982]: DEBUG nova.scheduler.client.report [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Overwriting current allocation {'allocations': {'206a5498-2e79-46c1-a636-9488a05fb67d': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 162}}, 'project_id': '820dcd8333cb4a678ef562e4150518d2', 
'user_id': '64533b0ad8894d41bdf9fe921b440063', 'consumer_generation': 1} on consumer 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1225.904326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.904541] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.904735] env[69982]: DEBUG nova.network.neutron [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1225.982191] env[69982]: DEBUG oslo_concurrency.lockutils [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.099240] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 7af5a14d-f586-4746-9831-8be255581637] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1226.199991] env[69982]: DEBUG oslo_concurrency.lockutils [None req-572afee6-24a0-4d3b-8fac-392149365527 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.770s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.200822] env[69982]: DEBUG oslo_concurrency.lockutils [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.219s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.201023] env[69982]: DEBUG nova.compute.manager [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1226.202170] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1db351-23bd-41c0-a9b5-0730801ead65 {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.210523] env[69982]: DEBUG nova.compute.manager [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1226.211136] env[69982]: DEBUG nova.objects.instance [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.311851] env[69982]: DEBUG nova.scheduler.client.report [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.525209] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.525577] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.525908] env[69982]: DEBUG nova.objects.instance [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1226.604084] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 930c8740-5ad1-4491-8dd6-1a568eaa6f62] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1226.678279] env[69982]: DEBUG nova.network.neutron [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [{"id": 
"0d321236-4177-49d0-afe0-f203e679fb0a", "address": "fa:16:3e:62:03:36", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0d321236-41", "ovs_interfaceid": "0d321236-4177-49d0-afe0-f203e679fb0a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.819220] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.751s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.821660] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.046s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.823547] env[69982]: INFO nova.compute.claims [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1226.839671] env[69982]: INFO nova.scheduler.client.report [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted allocations for instance 191d4433-cae3-48af-9c83-fa67499ad49c [ 1227.031216] env[69982]: DEBUG nova.objects.instance [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1227.106630] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: a8217447-bc22-4b84-925f-c3c09fb7228c] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1227.181051] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f636abfb-05bf-43e9-8aaf-982179688199 
tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-8089e191-85df-46cd-8a6b-415bfd5d6748" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.181941] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ff788e-936c-40d7-b0bd-bd53ff6ac139 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.190722] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a9c1a2-66bf-43de-b541-6c44db5c5af2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.219177] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1227.219575] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae46ff80-99e7-4caf-8220-8c84b5fa2e1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.227425] env[69982]: DEBUG oslo_vmware.api [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1227.227425] env[69982]: value = "task-3865558" [ 1227.227425] env[69982]: _type = "Task" [ 1227.227425] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.238324] env[69982]: DEBUG oslo_vmware.api [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865558, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1227.348042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3388455d-a49d-410c-9aff-2f196664472f tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "191d4433-cae3-48af-9c83-fa67499ad49c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.844s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.493221] env[69982]: DEBUG nova.compute.manager [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Stashing vm_state: active {{(pid=69982) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1227.533676] env[69982]: DEBUG nova.objects.base [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance<36b7f89e-7552-40b9-ada4-01abfcea8310> lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1227.533938] env[69982]: DEBUG nova.network.neutron [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1227.610461] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 35fdf25e-c8c0-4123-a95a-2a4c1a504f99] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1227.639941] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c02232b3-37d7-4e9a-9f1c-598824ae705c tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.114s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.739984] env[69982]: DEBUG oslo_vmware.api [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865558, 'name': PowerOffVM_Task, 'duration_secs': 0.277248} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1227.740241] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1227.740473] env[69982]: DEBUG nova.compute.manager [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1227.741288] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee60147-0c52-446b-aa83-4a916310e589 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.975239] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e404cff-9357-4435-bb33-8f707bb633a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.983953] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845616fa-d906-47a4-af9c-eee5074814f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.020259] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac05ce8f-9ac8-4632-b561-6318ba5a7dc6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.029383] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba811c3-bf94-4b19-bb6a-69f367ab3b0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.034356] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1228.045675] env[69982]: DEBUG nova.compute.provider_tree [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.114438] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.114793] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances with incomplete migration {{(pid=69982) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1228.254132] env[69982]: DEBUG 
oslo_concurrency.lockutils [None req-276ea97d-4d68-4ce4-883b-cd535bca344a tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1228.298793] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1228.298793] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f751a2df-e458-4281-97d0-348092556786 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.306608] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1228.306608] env[69982]: value = "task-3865559" [ 1228.306608] env[69982]: _type = "Task" [ 1228.306608] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.318411] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865559, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.424936] env[69982]: DEBUG nova.objects.instance [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1228.551024] env[69982]: DEBUG nova.scheduler.client.report [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.617237] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1228.817867] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865559, 'name': PowerOffVM_Task, 'duration_secs': 0.305573} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.818240] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1228.818961] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1228.819370] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1228.819549] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1228.819739] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1228.819887] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1228.820046] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1228.820265] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1228.820427] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1228.820596] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1228.820762] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1228.820934] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1228.826695] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc138d2b-23ff-4c3d-997a-3b73c52c7ae8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.846303] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1228.846303] env[69982]: value = "task-3865560" [ 1228.846303] env[69982]: _type = "Task" [ 1228.846303] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.855595] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865560, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.932766] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.932967] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.933160] env[69982]: DEBUG nova.network.neutron [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1228.933364] env[69982]: DEBUG nova.objects.instance [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'info_cache' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.055799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.056807] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1229.059021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.025s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.357032] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865560, 'name': ReconfigVM_Task, 'duration_secs': 0.163177} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.357684] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5650989-bf83-40f9-9740-9eab0f60daa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.380391] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1229.380650] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1229.380805] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1229.380983] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1229.381144] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1229.381294] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1229.381504] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1229.381662] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1229.381827] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1229.381988] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1229.382188] env[69982]: DEBUG nova.virt.hardware [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1229.383066] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79655c9f-1b5d-48da-bd73-7e8aafee221c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.389704] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1229.389704] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52359c98-553b-84d5-b086-a7633d3c03e1" [ 1229.389704] env[69982]: _type = "Task" [ 1229.389704] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.399047] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52359c98-553b-84d5-b086-a7633d3c03e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.436833] env[69982]: DEBUG nova.objects.base [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1229.562680] env[69982]: DEBUG nova.compute.utils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1229.568526] env[69982]: INFO nova.compute.claims [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.572447] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1229.572629] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1229.576219] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.576556] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.576903] env[69982]: DEBUG nova.objects.instance [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1229.626394] env[69982]: DEBUG nova.policy [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '020a36f3aea34b1a8dcc379df67518d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d4602e8ada243b0a9bf86ea9677f14b', 'project_domain_id': 'default', 'roles': ['member', 
'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1229.901604] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52359c98-553b-84d5-b086-a7633d3c03e1, 'name': SearchDatastore_Task, 'duration_secs': 0.008307} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.907648] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1229.908027] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27cbfa11-b1e4-4905-9bb5-f5758767330b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.930142] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1229.930142] env[69982]: value = "task-3865561" [ 1229.930142] env[69982]: _type = "Task" [ 1229.930142] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.935049] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Successfully created port: af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1229.940497] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865561, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.966878] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.967223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.073153] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1230.079040] env[69982]: INFO nova.compute.resource_tracker [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating resource usage from migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae [ 1230.225769] env[69982]: DEBUG nova.objects.instance [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1230.253136] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Successfully created port: c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1230.258181] env[69982]: DEBUG nova.network.neutron [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [{"id": "56cebe1d-243c-4f51-b0a0-200e18141707", "address": "fa:16:3e:49:24:7a", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56cebe1d-24", "ovs_interfaceid": "56cebe1d-243c-4f51-b0a0-200e18141707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.275913] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e5a7f6-1d87-4f52-b39d-db7ea9521bdd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.284517] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789aa459-80a9-4b61-a3b4-7cb2f572e946 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.317996] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef25749-65a4-4813-8116-5a2c997975ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.326015] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49bbfd52-d572-415b-9707-02edc05416b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.341379] env[69982]: DEBUG nova.compute.provider_tree [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1231.197738] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1231.204704] env[69982]: DEBUG nova.objects.base [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance<36b7f89e-7552-40b9-ada4-01abfcea8310> lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1231.204913] env[69982]: DEBUG nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1231.207149] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.208916] env[69982]: DEBUG nova.scheduler.client.report [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.218141] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865561, 'name': ReconfigVM_Task, 'duration_secs': 0.260886} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.218424] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1231.220093] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12b2e548-acdd-4ddd-8221-f979e9458c78 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.248223] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1231.250263] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca57ca69-d082-4266-a9ac-bf198ee1b5b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.264707] env[69982]: DEBUG nova.policy [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1231.271974] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1231.271974] env[69982]: value = "task-3865562" [ 1231.271974] env[69982]: _type = "Task" [ 1231.271974] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.280767] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.552931] env[69982]: DEBUG nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Successfully created port: fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1231.713896] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1231.720702] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.662s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.720884] env[69982]: INFO nova.compute.manager [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Migrating [ 1231.759181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.759478] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.761266] env[69982]: INFO nova.compute.claims [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.767118] env[69982]: DEBUG nova.compute.manager [req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-vif-plugged-af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1231.767339] env[69982]: DEBUG oslo_concurrency.lockutils [req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.767568] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.767733] env[69982]: DEBUG oslo_concurrency.lockutils [req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.769129] env[69982]: DEBUG nova.compute.manager [req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] No waiting events found dispatching network-vif-plugged-af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1231.769129] env[69982]: WARNING nova.compute.manager [req-b3935cd7-15b3-46a2-93eb-9b3689c68de1 req-83b328bf-e762-425c-9e25-c3f03c0b9432 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received unexpected event network-vif-plugged-af76a056-445a-4517-a3bd-559946857a62 for instance with vm_state building and task_state spawning. [ 1231.784101] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865562, 'name': ReconfigVM_Task, 'duration_secs': 0.388228} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.784786] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748/8089e191-85df-46cd-8a6b-415bfd5d6748.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1231.785484] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f17028c-091d-4241-ac88-98f36b991ef1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.809257] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Successfully updated port: af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1231.814836] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c6a4f1-094f-4095-b1f2-6dad0f901d66 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.843855] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10dd1b0-8979-469a-a755-2d872541c961 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.867543] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8ef52a-3a44-41e0-8621-dffb37d2f62a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.876351] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1231.876697] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64c821e0-be35-4ff0-9e88-6823640b19f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.884102] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1231.884102] env[69982]: value = "task-3865563" [ 1231.884102] env[69982]: _type = "Task" [ 1231.884102] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.895821] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865563, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.898447] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1231.898730] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.898863] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1231.899073] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.899214] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1231.899443] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1231.899588] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1231.899798] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1231.899980] env[69982]: DEBUG nova.virt.hardware [None 
req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1231.900172] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1231.900351] env[69982]: DEBUG nova.virt.hardware [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1231.901228] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6e4b90-82c4-415e-98a9-f6377106dff4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.911429] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d68240-941f-43ab-8b03-5b37c1f620c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.221113] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1232.221739] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13fbd957-0c07-4cbf-bfa2-365e9327364f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.231102] env[69982]: DEBUG oslo_vmware.api [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1232.231102] env[69982]: value = "task-3865564" [ 1232.231102] env[69982]: _type = "Task" [ 1232.231102] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.241960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.242251] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.242522] env[69982]: DEBUG nova.network.neutron [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1232.244518] env[69982]: DEBUG oslo_vmware.api [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865564, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.396329] env[69982]: DEBUG oslo_vmware.api [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865563, 'name': PowerOnVM_Task, 'duration_secs': 0.419371} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.396624] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.748314] env[69982]: DEBUG oslo_vmware.api [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865564, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.967268] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c506d9b-eebf-4527-badc-9a860328457c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.978541] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882e6611-de0d-4898-8fa3-2ad9d395b590 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.018961] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fda9b85-ae2e-412c-a370-8bdcdb3a50f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.027934] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c77e518-9b5a-40af-a3ee-0d399475e813 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.042652] env[69982]: DEBUG nova.compute.provider_tree [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.243207] env[69982]: DEBUG oslo_vmware.api [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865564, 'name': PowerOnVM_Task, 'duration_secs': 0.818973} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.243685] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1233.243761] env[69982]: DEBUG nova.compute.manager [None req-a10820b1-23b6-4ce5-93d1-27d29da4db7f tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1233.244798] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0209b0e-beb9-47bc-a108-3aeef73cd978 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.309605] env[69982]: DEBUG nova.network.neutron [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.328388] env[69982]: DEBUG nova.compute.manager [req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-plugged-fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1233.328615] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.328822] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.328989] env[69982]: DEBUG oslo_concurrency.lockutils [req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.329177] env[69982]: DEBUG nova.compute.manager [req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] No waiting events found dispatching network-vif-plugged-fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1233.329344] env[69982]: WARNING nova.compute.manager [req-6d597f57-b823-4d45-9537-1e92939c7775 req-2922d035-a7c8-4213-ac60-05f6523e6e22 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received unexpected event network-vif-plugged-fc8b183b-39e0-493b-951b-cb7e8c614a94 for instance with vm_state active and task_state None. [ 1233.427276] env[69982]: DEBUG nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Successfully updated port: fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.453441] env[69982]: INFO nova.compute.manager [None req-f636abfb-05bf-43e9-8aaf-982179688199 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance to original state: 'active' [ 1233.545603] env[69982]: DEBUG nova.scheduler.client.report [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1233.792887] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Successfully updated port: c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1233.812570] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock 
"refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.864515] env[69982]: DEBUG nova.compute.manager [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-changed-af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1233.864774] env[69982]: DEBUG nova.compute.manager [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Refreshing instance network info cache due to event network-changed-af76a056-445a-4517-a3bd-559946857a62. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1233.864965] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Acquiring lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.865119] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Acquired lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.865281] env[69982]: DEBUG nova.network.neutron [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Refreshing network info cache for port af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1233.930283] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.930501] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.930645] env[69982]: DEBUG nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1234.050484] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.291s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.051187] env[69982]: DEBUG nova.compute.manager [None 
req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1234.115036] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.115311] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.297368] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.346351] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1234.346606] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.346835] env[69982]: INFO nova.compute.manager [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Shelving [ 1234.422259] env[69982]: DEBUG nova.network.neutron [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1234.467503] env[69982]: WARNING nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. 
ignoring it [ 1234.556648] env[69982]: DEBUG nova.compute.utils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1234.561173] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1234.561342] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1234.564603] env[69982]: DEBUG nova.network.neutron [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.623181] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.623181] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.623181] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.623181] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.658929] env[69982]: DEBUG nova.policy [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '361bff09a25e4b5ab6a071a458858131', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afaf89b0250d46048813da25c754e1a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1234.938412] env[69982]: DEBUG nova.network.neutron [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] 
[instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "address": "fa:16:3e:b0:d6:d7", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc8b183b-39", "ovs_interfaceid": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.067025] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1235.070486] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Releasing lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.070741] env[69982]: DEBUG nova.compute.manager [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-vif-plugged-c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.070944] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.071152] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.071317] env[69982]: DEBUG oslo_concurrency.lockutils [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.071480] env[69982]: DEBUG nova.compute.manager [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] No waiting events found dispatching network-vif-plugged-c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1235.071683] env[69982]: WARNING nova.compute.manager [req-381d6383-96b9-4169-96f3-9bcedba6e854 req-e2b4ce7d-e064-4415-9539-b4e543133488 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received unexpected event network-vif-plugged-c28443e0-917c-4e53-b3b2-f3667e0c96e4 for instance with vm_state building and task_state spawning. 
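Note: the "Acquiring lock … by …", "Lock … acquired … waited 0.000s" and "Lock … released … held 0.000s" lines above are emitted by the `inner` wrapper that oslo.concurrency's `synchronized` decorator places around the guarded function (lockutils.py:405/410/424). A minimal sketch of that pattern follows; the lock name and function body are hypothetical and not code captured in this log.

```python
from oslo_concurrency import lockutils


# Hypothetical illustration: serialize per-instance event handling the way the
# log guards "<instance-uuid>-events". Entering and leaving the decorated
# function is what produces the acquired/waited and released/held DEBUG lines.
@lockutils.synchronized('example-instance-uuid-events')
def pop_instance_event(waiting_events, event_name):
    # Critical section: return a waiting event if one was registered,
    # otherwise None so the caller can log the "Received unexpected event"
    # warning seen above.
    return waiting_events.pop(event_name, None)
```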
[ 1235.072362] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.073231] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1235.074171] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Successfully created port: d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1235.128381] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Getting list of instances from cluster (obj){ [ 1235.128381] env[69982]: value = "domain-c8" [ 1235.128381] env[69982]: _type = "ClusterComputeResource" [ 1235.128381] env[69982]: } {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1235.130450] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0233bea4-7383-4916-8390-3a328a15f975 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.152938] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Got total of 8 instances {{(pid=69982) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1235.152938] env[69982]: WARNING nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] While synchronizing instance power states, found 10 instances in the database and 8 instances on the hypervisor. 
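Note: the WARNING above comes from the periodic `_sync_power_states` task comparing the VM list returned for cluster domain-c8 (8 VMs) with the Nova database (10 records, two of them still building or deleting). A simplified, hypothetical sketch of that reconciliation is shown below; the helper objects are illustrative, while the real logic lives in nova.compute.manager.ComputeManager._sync_power_states as the log's source references indicate.

```python
import logging

LOG = logging.getLogger(__name__)


def sync_power_states(db_instances, driver_uuids):
    """Simplified, illustrative version of the reconciliation the log shows."""
    if len(db_instances) != len(driver_uuids):
        LOG.warning('While synchronizing instance power states, found %d '
                    'instances in the database and %d instances on the '
                    'hypervisor.', len(db_instances), len(driver_uuids))
    for instance in db_instances:
        LOG.debug('Triggering sync for uuid %s', instance.uuid)
        if instance.task_state is not None:
            # Instances with a pending task (resize_migrating, deleting, ...)
            # are skipped, matching the "Skip." INFO lines in the log.
            LOG.info('During sync_power_state the instance has a pending '
                     'task (%s). Skip.', instance.task_state)
            continue
        # Otherwise the driver's power state would be queried under a
        # per-instance lock and reconciled with the database record here
        # (omitted in this sketch).
```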
[ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 31f56d0e-7c64-4fe3-917e-7ebb814ae924 {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 92f898e2-0dfd-45ed-b74b-958f6c5af844 {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 9815a4f9-3827-4e83-b897-18edadcac55b {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 54b91e61-1302-40e6-b928-fcca31cd9b3a {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Triggering sync for uuid cb226b65-d91f-4216-9844-37c22d3705a7 {{(pid=69982) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1235.152938] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.154518] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.154898] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.155144] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.155456] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.155648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.155896] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.156101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.156360] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.156591] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.156790] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.156979] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.157227] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.157402] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.157590] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] During sync_power_state the instance has a pending task (resize_migrating). Skip. [ 1235.157752] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.157954] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.158310] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.158548] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.158751] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.160470] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1235.160470] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536a3e51-013d-44d2-a080-ed6f36740018 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.163416] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc9c4a9-2983-4941-bd79-109318590d3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.166658] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a9fcad-b026-4317-9b3e-e72a4e717260 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.169728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7860afa4-1484-46f9-9e14-1efcefc25f2b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.172984] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f1a27d-bc81-4013-af8d-ba26ffddc70a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.178569] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258edc1c-ecf1-4106-b53b-7a7e2ad2b4d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.181703] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.204451] env[69982]: WARNING oslo_messaging._drivers.amqpdriver [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1235.256094] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.327181] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a85bfa6-dd4f-4392-9a77-b6b76ce940b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.347079] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 0 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1235.358558] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.358874] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6c8cb19-2b51-424e-a7cc-2f444a62a385 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.367513] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1235.367513] env[69982]: value = "task-3865565" [ 1235.367513] env[69982]: _type = "Task" [ 1235.367513] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.377885] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865565, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.397304] env[69982]: DEBUG nova.compute.manager [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-changed-fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1235.397761] env[69982]: DEBUG nova.compute.manager [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing instance network info cache due to event network-changed-fc8b183b-39e0-493b-951b-cb7e8c614a94. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1235.397761] env[69982]: DEBUG oslo_concurrency.lockutils [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.441337] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.441973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.442228] env[69982]: DEBUG oslo_concurrency.lockutils [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.442434] env[69982]: DEBUG nova.network.neutron [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing network info cache for port fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1235.557663] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.611650] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1235.687789] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.688142] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.688614] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.688791] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1235.689697] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27037d5-cadb-4247-8d29-2f5c050b373f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.697985] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.699369] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5031f3b5-1281-4d59-975f-a9915ee7f858 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.716663] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] During sync_power_state the instance has a pending task (deleting). Skip. [ 1235.716806] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.717272] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] During sync_power_state the instance has a pending task (deleting). Skip. 
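Note: elsewhere in this excerpt, power-off requests (task-3865565, task-3865567) are issued through oslo.vmware and then polled by wait_for_task/_poll_task until the PowerOffVM_Task completes. A minimal sketch of that call pattern, assuming an already-established VMwareAPISession and a VM managed-object reference obtained elsewhere (both outside this sketch):

```python
from oslo_vmware import api as vmware_api


def power_off_vm(session: vmware_api.VMwareAPISession, vm_ref):
    """Illustrative only: issue PowerOffVM_Task and block until it finishes.

    session.wait_for_task() is the call that produces the "Waiting for the
    task" and "progress is N%" lines recorded in the log.
    """
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)
```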
[ 1235.717402] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.717705] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.717993] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.718297] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.720951] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.465s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.721200] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.721400] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.721564] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.723176] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.166s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.723331] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.723523] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.723684] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1235.725038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.725781] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74466e0b-1d9a-4571-a262-0c06329e68cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.728523] env[69982]: INFO nova.compute.manager [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Terminating instance [ 1235.730124] env[69982]: INFO nova.compute.manager [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Terminating instance [ 1235.731742] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46556463-20fb-48c8-8a32-6605debdc5c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.752974] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479277c1-e812-413f-9c41-7a530a510a9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.757780] 
env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1235.758009] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1235.758178] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1235.758362] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1235.758507] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1235.758652] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1235.758853] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1235.759012] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1235.759188] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1235.759352] env[69982]: DEBUG 
nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1235.759524] env[69982]: DEBUG nova.virt.hardware [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1235.767033] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfiguring VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1235.767921] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3e40eb6-b055-4f9e-b473-251692266577 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.811995] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179745MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1235.812195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1235.812449] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1235.817596] env[69982]: DEBUG oslo_vmware.api [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1235.817596] env[69982]: value = "task-3865566" [ 1235.817596] env[69982]: _type = "Task" [ 1235.817596] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.853069] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1235.853424] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f45c882e-e0c5-4187-b6b8-bfd60e9870e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.861244] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1235.861244] env[69982]: value = "task-3865567" [ 1235.861244] env[69982]: _type = "Task" [ 1235.861244] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.871967] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.888957] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865565, 'name': PowerOffVM_Task, 'duration_secs': 0.211947} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.888957] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1235.888957] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3cde87-fd2e-4ac4-93c8-58d8fd07cf72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.908907] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28c6026-1813-4bb2-a008-4285a5d146fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.969145] env[69982]: DEBUG nova.network.neutron [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Updating instance_info_cache with network_info: [{"id": "af76a056-445a-4517-a3bd-559946857a62", "address": "fa:16:3e:22:47:51", "network": {"id": "8fe9df9f-b21a-464f-abc7-855ba82dca1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2001605232", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf76a056-44", "ovs_interfaceid": "af76a056-445a-4517-a3bd-559946857a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "address": "fa:16:3e:8d:41:c1", "network": {"id": "20453287-ef80-4bfb-906e-149ef01b4d9d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-244557927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc28443e0-91", "ovs_interfaceid": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.001156] env[69982]: DEBUG nova.compute.manager [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-changed-c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1236.001397] env[69982]: DEBUG nova.compute.manager [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Refreshing instance network info cache due to event network-changed-c28443e0-917c-4e53-b3b2-f3667e0c96e4. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1236.001629] env[69982]: DEBUG oslo_concurrency.lockutils [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] Acquiring lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1236.077400] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1236.109942] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.110294] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.110473] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.110742] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 
tempest-AttachVolumeNegativeTest-1055724446-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.111015] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.111185] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.111420] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.111588] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.111763] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.111932] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.112135] env[69982]: DEBUG nova.virt.hardware [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.113488] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52069fa3-cf41-486d-a7c0-d3409c862c58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.123309] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c356a4-2d13-46e5-829a-e6d4f3f04356 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.180267] env[69982]: DEBUG nova.network.neutron [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updated VIF entry in instance network info cache for port fc8b183b-39e0-493b-951b-cb7e8c614a94. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1236.180723] env[69982]: DEBUG nova.network.neutron [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "address": "fa:16:3e:b0:d6:d7", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc8b183b-39", "ovs_interfaceid": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.238462] env[69982]: DEBUG nova.compute.manager [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1236.239166] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.239353] env[69982]: DEBUG nova.compute.manager [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1236.239564] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.239800] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5c8d1fe-8f1d-4386-b97d-25bf09e23089 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.241656] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3799415e-d367-46fa-b8b1-29ecfdfbf96a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.250071] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1236.250071] env[69982]: value = "task-3865568" [ 1236.250071] env[69982]: _type = "Task" [ 1236.250071] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.251821] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1236.251821] env[69982]: value = "task-3865569" [ 1236.251821] env[69982]: _type = "Task" [ 1236.251821] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.263915] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865568, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.339100] env[69982]: DEBUG oslo_vmware.api [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865566, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.372344] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865567, 'name': PowerOffVM_Task, 'duration_secs': 0.288468} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.372734] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.372942] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 17 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1236.424103] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1236.424689] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f3ec8519-b6a6-4569-83a1-1bd6499cc7a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.437594] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1236.437594] env[69982]: value = "task-3865570" [ 1236.437594] env[69982]: _type = "Task" [ 1236.437594] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.450013] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865570, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.471975] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.472455] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance network_info: |[{"id": "af76a056-445a-4517-a3bd-559946857a62", "address": "fa:16:3e:22:47:51", "network": {"id": "8fe9df9f-b21a-464f-abc7-855ba82dca1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2001605232", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf76a056-44", "ovs_interfaceid": "af76a056-445a-4517-a3bd-559946857a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "address": "fa:16:3e:8d:41:c1", "network": {"id": "20453287-ef80-4bfb-906e-149ef01b4d9d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-244557927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc28443e0-91", "ovs_interfaceid": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1236.472820] env[69982]: DEBUG oslo_concurrency.lockutils [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] Acquired lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1236.473063] env[69982]: DEBUG nova.network.neutron 
[req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Refreshing network info cache for port c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1236.474716] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:22:47:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c894ab55-c869-4530-9702-cb46d173ce94', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af76a056-445a-4517-a3bd-559946857a62', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:41:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ae4e3171-21cd-4094-b6cf-81bf366c75bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c28443e0-917c-4e53-b3b2-f3667e0c96e4', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1236.484603] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.485247] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1236.485531] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-327c992f-fdd6-43df-8ead-077f81d651c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.511441] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1236.511441] env[69982]: value = "task-3865571" [ 1236.511441] env[69982]: _type = "Task" [ 1236.511441] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.525380] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865571, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.660378] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Successfully updated port: d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1236.685796] env[69982]: DEBUG oslo_concurrency.lockutils [req-00725a08-9adc-4643-a7a7-a310e819e8d2 req-cc453186-f0da-460a-9394-75a61e23cd14 service nova] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.768073] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865568, 'name': PowerOffVM_Task, 'duration_secs': 0.22327} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.771873] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.772262] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1236.772654] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768074', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'name': 'volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '9815a4f9-3827-4e83-b897-18edadcac55b', 'attached_at': '2025-05-07T07:14:28.000000', 'detached_at': '', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'serial': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1236.772926] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865569, 'name': PowerOffVM_Task, 'duration_secs': 0.272271} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.773895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0f8f0a-50bc-4ee6-b6b8-4fd2cc7c193f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.777758] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1236.777973] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1236.778360] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768097', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'name': 'volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8089e191-85df-46cd-8a6b-415bfd5d6748', 'attached_at': '2025-05-07T07:15:00.000000', 'detached_at': '', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'serial': '2dfcb6d8-aab9-4474-86ca-8a2566cef584'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1236.779187] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6ecbec-a7b8-47b8-b8e5-aeb415b16313 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.816253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3660d878-b615-4714-91b9-9fbc7f42394c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.819916] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb160bd-4ce2-44db-a122-cabeae8ff43c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.832966] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b7baba-798e-405c-844d-1cfbe60528ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.835980] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Applying migration context for instance 54b91e61-1302-40e6-b928-fcca31cd9b3a as it has an incoming, in-progress migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae. 
Migration status is migrating {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1236.837458] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating resource usage from migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae [ 1236.840206] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbeca1f-ff80-4bc5-9c92-7eff0af55ceb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.850687] env[69982]: DEBUG oslo_vmware.api [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865566, 'name': ReconfigVM_Task, 'duration_secs': 0.914488} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.880846] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.881109] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfigured VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1236.885567] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1236.885831] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1236.885913] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1236.886112] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1236.886257] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1236.886406] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1236.886612] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1236.886769] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1236.886937] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1236.887115] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1236.887288] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1236.893781] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894303] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894303] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ce24e165-230a-44bb-ae46-d1479e71585a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894422] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894594] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 9815a4f9-3827-4e83-b897-18edadcac55b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894750] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.894943] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8089e191-85df-46cd-8a6b-415bfd5d6748 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.895113] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.895363] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1236.895558] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 54b91e61-1302-40e6-b928-fcca31cd9b3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.895845] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance cb226b65-d91f-4216-9844-37c22d3705a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1236.896042] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1236.896531] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2752MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1236.899982] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12298866-9fcf-41dd-84a5-e9615fa210be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.903292] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bca535a7-d8cb-4acb-b839-e63afbafd0de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.916968] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283abf89-1667-46f6-88a9-ced52e565c79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.937088] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] The volume has not been displaced from its original location: [datastore2] volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085/volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085.vmdk. No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1236.942601] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1236.957819] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be2db352-9f9c-4e2d-9430-a31abe0df4ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.970625] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] The volume has not been displaced from its original location: [datastore2] volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584/volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584.vmdk. 
No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1236.976238] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1236.976531] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1236.976531] env[69982]: value = "task-3865572" [ 1236.976531] env[69982]: _type = "Task" [ 1236.976531] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.980199] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-00f8b96a-a9a3-473a-988d-7037f155a87e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.003779] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1237.003779] env[69982]: value = "task-3865573" [ 1237.003779] env[69982]: _type = "Task" [ 1237.003779] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.007450] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865570, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.014641] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865572, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.016022] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1237.016022] env[69982]: value = "task-3865574" [ 1237.016022] env[69982]: _type = "Task" [ 1237.016022] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.032078] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865573, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.038752] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865571, 'name': CreateVM_Task, 'duration_secs': 0.437258} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.042793] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1237.043200] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.047278] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.047565] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.047919] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1237.048566] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2e19999-6c6f-4fe6-be26-193e960f6677 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.054340] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1237.054340] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52276835-6b9f-7595-57f3-aaea63498060" [ 1237.054340] env[69982]: _type = "Task" [ 1237.054340] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.065665] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52276835-6b9f-7595-57f3-aaea63498060, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.161468] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c705e565-005e-4f57-85c8-ac82d5337ffc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.168300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.168300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.168871] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1237.178280] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb97e7bb-fed4-4409-9039-dcc8bc1c9243 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.214693] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460434b4-aaf0-4db9-a3b9-9357a4669693 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.223917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34bbe64c-f6fe-43e8-b2dc-0c76f7cfd981 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.240188] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.319446] env[69982]: DEBUG nova.network.neutron [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Updated VIF entry in instance network info cache for port c28443e0-917c-4e53-b3b2-f3667e0c96e4. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1237.319875] env[69982]: DEBUG nova.network.neutron [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Updating instance_info_cache with network_info: [{"id": "af76a056-445a-4517-a3bd-559946857a62", "address": "fa:16:3e:22:47:51", "network": {"id": "8fe9df9f-b21a-464f-abc7-855ba82dca1f", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2001605232", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c894ab55-c869-4530-9702-cb46d173ce94", "external-id": "nsx-vlan-transportzone-792", "segmentation_id": 792, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf76a056-44", "ovs_interfaceid": "af76a056-445a-4517-a3bd-559946857a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "address": "fa:16:3e:8d:41:c1", "network": {"id": "20453287-ef80-4bfb-906e-149ef01b4d9d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-244557927", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.208", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "4d4602e8ada243b0a9bf86ea9677f14b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ae4e3171-21cd-4094-b6cf-81bf366c75bd", "external-id": "nsx-vlan-transportzone-193", "segmentation_id": 193, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc28443e0-91", "ovs_interfaceid": "c28443e0-917c-4e53-b3b2-f3667e0c96e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.401423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ffc91c0b-896a-4ddd-a942-fe8f3a6616e2 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.825s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.468370] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865570, 'name': CreateSnapshot_Task, 'duration_secs': 0.633593} completed 
successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.468699] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1237.469445] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee687357-135c-4ec4-8875-5fd957f99a55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.499027] env[69982]: DEBUG nova.compute.manager [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Received event network-vif-plugged-d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.499262] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.499468] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.499638] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.499804] env[69982]: DEBUG nova.compute.manager [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] No waiting events found dispatching network-vif-plugged-d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1237.499971] env[69982]: WARNING nova.compute.manager [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Received unexpected event network-vif-plugged-d4331d9d-de6b-48c6-9e45-c68bc45f7209 for instance with vm_state building and task_state spawning. 
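The power-off, snapshot and reconfigure entries above all follow the same oslo.vmware pattern: invoke a vCenter *_Task method, then poll the returned task object until it completes (the recurring "progress is N%" lines). A minimal sketch of that pattern, assuming an already-reachable vCenter and a VM managed object reference (vm_ref) obtained elsewhere; the host and credentials below are placeholders, not values from this log, and this is not Nova's own power_off_instance helper:

    from oslo_vmware import api as vmware_api

    # Placeholder connection details -- a real nova-compute reads these from the
    # [vmware] section of nova.conf; they are not taken from this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_off(vm_ref):
        # Issues VirtualMachine.PowerOffVM_Task against the vim service and gets
        # back a Task managed object reference (the "task-38655xx" values above).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task until it reports success, or raises if
        # the task ends in an error state.
        return session.wait_for_task(task)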
[ 1237.500143] env[69982]: DEBUG nova.compute.manager [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Received event network-changed-d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1237.500298] env[69982]: DEBUG nova.compute.manager [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Refreshing instance network info cache due to event network-changed-d4331d9d-de6b-48c6-9e45-c68bc45f7209. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1237.500462] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Acquiring lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.507646] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865572, 'name': ReconfigVM_Task, 'duration_secs': 0.215699} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.508069] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 33 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1237.525062] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865573, 'name': ReconfigVM_Task, 'duration_secs': 0.178683} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.525844] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1237.531620] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07573738-04f9-4e67-8e50-85360072b475 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.544813] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865574, 'name': ReconfigVM_Task, 'duration_secs': 0.211066} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.545534] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1237.554023] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b54f71c-9ea7-4f68-b8ba-88cc3ab9fc0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.562534] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1237.562534] env[69982]: value = "task-3865575" [ 1237.562534] env[69982]: _type = "Task" [ 1237.562534] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.571948] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1237.571948] env[69982]: value = "task-3865576" [ 1237.571948] env[69982]: _type = "Task" [ 1237.571948] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.576552] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52276835-6b9f-7595-57f3-aaea63498060, 'name': SearchDatastore_Task, 'duration_secs': 0.011223} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.583327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.583607] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1237.583842] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1237.583990] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1237.584200] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1237.584506] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.585010] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f23cb48-9491-452b-a00a-2c8a070d9ba7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.592384] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865576, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.603627] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1237.603844] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1237.604739] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebadbba3-6e19-4639-978c-60b47702ee8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.610656] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1237.610656] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523188ff-64cb-bc85-7deb-7cc27c22e7ec" [ 1237.610656] env[69982]: _type = "Task" [ 1237.610656] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.619118] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523188ff-64cb-bc85-7deb-7cc27c22e7ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.708687] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1237.743218] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.822934] env[69982]: DEBUG oslo_concurrency.lockutils [req-b45ad8b6-757c-4650-a66c-6122336bfd42 req-31541aa7-dfec-4abc-9c62-79801838aab7 service nova] Releasing lock "refresh_cache-4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1237.842974] env[69982]: DEBUG nova.network.neutron [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updating instance_info_cache with network_info: [{"id": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "address": "fa:16:3e:2e:4f:6f", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4331d9d-de", "ovs_interfaceid": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.988423] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1237.989098] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-08d7453a-2e5d-4743-b81f-20f29d7e5cd1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.997872] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: 
(returnval){ [ 1237.997872] env[69982]: value = "task-3865577" [ 1237.997872] env[69982]: _type = "Task" [ 1237.997872] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.007134] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865577, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.018375] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1238.018614] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1238.018769] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1238.018959] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1238.019108] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1238.019258] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1238.019500] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1238.019619] env[69982]: DEBUG 
nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1238.019795] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1238.019964] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1238.020143] env[69982]: DEBUG nova.virt.hardware [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1238.025923] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1238.026250] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35123ae0-1193-43a3-9f24-25dc5a6eff7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.048497] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1238.048497] env[69982]: value = "task-3865578" [ 1238.048497] env[69982]: _type = "Task" [ 1238.048497] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.058077] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865578, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.075095] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865575, 'name': ReconfigVM_Task, 'duration_secs': 0.160359} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.075471] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768074', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'name': 'volume-d6ce4976-1bf1-441f-8fbb-35dab72ed085', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '9815a4f9-3827-4e83-b897-18edadcac55b', 'attached_at': '2025-05-07T07:14:28.000000', 'detached_at': '', 'volume_id': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085', 'serial': 'd6ce4976-1bf1-441f-8fbb-35dab72ed085'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1238.075715] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1238.076545] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd911aa-3bc2-4229-acf3-c62d86d34218 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.088148] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865576, 'name': ReconfigVM_Task, 'duration_secs': 0.140608} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.091381] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768097', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'name': 'volume-2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '8089e191-85df-46cd-8a6b-415bfd5d6748', 'attached_at': '2025-05-07T07:15:00.000000', 'detached_at': '', 'volume_id': '2dfcb6d8-aab9-4474-86ca-8a2566cef584', 'serial': '2dfcb6d8-aab9-4474-86ca-8a2566cef584'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1238.091875] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1238.092245] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1238.093017] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4aad59-0bec-42c7-aacd-0cd3ae7ee4c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.095852] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25e280d2-b46c-4775-812d-66a62418026d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.103668] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1238.103936] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4240735b-b9a3-446a-be72-83e2cea56eeb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.120990] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]523188ff-64cb-bc85-7deb-7cc27c22e7ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010085} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.121852] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e7bddf-2d8f-43f4-99ca-bcf0a65412c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.137930] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1238.137930] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca00fc-32c9-ff70-308b-81410557bbdb" [ 1238.137930] env[69982]: _type = "Task" [ 1238.137930] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.150495] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca00fc-32c9-ff70-308b-81410557bbdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.205572] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1238.205964] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1238.206298] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleting the datastore file [datastore2] 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.209237] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2a9577d-f409-4591-a13e-7cba1dfd5959 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.212462] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1238.212930] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1238.213165] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore2] 9815a4f9-3827-4e83-b897-18edadcac55b {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1238.213566] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ccaafcd-497d-41b7-980c-419f178e720e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.225194] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1238.225194] env[69982]: value = "task-3865581" [ 1238.225194] env[69982]: _type = "Task" [ 1238.225194] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.226999] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1238.226999] env[69982]: value = "task-3865582" [ 1238.226999] env[69982]: _type = "Task" [ 1238.226999] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.239862] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865582, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.243483] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865581, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.250523] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1238.251018] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.438s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.346088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.346347] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Instance network_info: |[{"id": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "address": "fa:16:3e:2e:4f:6f", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4331d9d-de", "ovs_interfaceid": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1238.346784] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Acquired lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.346999] env[69982]: DEBUG nova.network.neutron [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Refreshing network info cache for port d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1238.348714] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 
tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:4f:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a91c3a96-63d0-407c-bcde-c3d5b58d9cb2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4331d9d-de6b-48c6-9e45-c68bc45f7209', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1238.357651] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.361133] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1238.361760] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9790fd18-b179-4511-9dd4-f6144609ee6e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.384243] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1238.384243] env[69982]: value = "task-3865583" [ 1238.384243] env[69982]: _type = "Task" [ 1238.384243] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.393508] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865583, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.509218] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865577, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.559684] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865578, 'name': ReconfigVM_Task, 'duration_secs': 0.235073} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.560051] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1238.560917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2baf2b36-a8c5-494a-818c-76d2a1587ce9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.588260] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.588260] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6d05f41-3d8b-407e-90e0-f34247c37030 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.608930] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1238.608930] env[69982]: value = "task-3865584" [ 1238.608930] env[69982]: _type = "Task" [ 1238.608930] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.619426] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865584, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.650447] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ca00fc-32c9-ff70-308b-81410557bbdb, 'name': SearchDatastore_Task, 'duration_secs': 0.016858} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.651124] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.651124] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d/4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1238.651479] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeb9c996-ecaa-466d-9515-a4643be7c9d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.659312] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1238.659312] env[69982]: value = "task-3865585" [ 1238.659312] env[69982]: _type = "Task" [ 1238.659312] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.663813] env[69982]: DEBUG nova.network.neutron [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updated VIF entry in instance network info cache for port d4331d9d-de6b-48c6-9e45-c68bc45f7209. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1238.665086] env[69982]: DEBUG nova.network.neutron [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updating instance_info_cache with network_info: [{"id": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "address": "fa:16:3e:2e:4f:6f", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4331d9d-de", "ovs_interfaceid": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1238.671984] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865585, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.740249] env[69982]: DEBUG oslo_vmware.api [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214639} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.744305] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.744699] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1238.744972] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1238.745217] env[69982]: INFO nova.compute.manager [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Took 2.51 seconds to destroy the instance on the hypervisor. [ 1238.745519] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.746232] env[69982]: DEBUG oslo_vmware.api [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132305} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.746558] env[69982]: DEBUG nova.compute.manager [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1238.746689] env[69982]: DEBUG nova.network.neutron [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1238.748800] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.749097] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1238.749273] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1238.749474] env[69982]: INFO nova.compute.manager [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Took 2.51 seconds to destroy the instance on the hypervisor. [ 1238.749774] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1238.750058] env[69982]: DEBUG nova.compute.manager [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1238.750210] env[69982]: DEBUG nova.network.neutron [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1238.899564] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865583, 'name': CreateVM_Task, 'duration_secs': 0.38158} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.900146] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1238.901186] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.901381] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.902127] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1238.902565] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd78b422-379a-4d9b-911f-bf9f1e05b1f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.910486] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1238.910486] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6ba6b-6cfb-0b7d-3966-e29b2b15ed02" [ 1238.910486] env[69982]: _type = "Task" [ 1238.910486] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.925640] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6ba6b-6cfb-0b7d-3966-e29b2b15ed02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.019661] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865577, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.125051] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865584, 'name': ReconfigVM_Task, 'duration_secs': 0.382634} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.125609] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1239.125987] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 50 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1239.174985] env[69982]: DEBUG oslo_concurrency.lockutils [req-1f803a86-5c51-432f-9d12-d51244211a65 req-f8cfb696-eb04-4910-9278-e507e760f18b service nova] Releasing lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.174985] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865585, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.277060] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-138352cf-ac45-467a-99b2-d318216a2d53" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.277343] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-138352cf-ac45-467a-99b2-d318216a2d53" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.277718] env[69982]: DEBUG nova.objects.instance [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.377982] env[69982]: DEBUG nova.compute.manager [req-528156dd-0f4e-4943-be9a-a9792f256d35 req-3e9f0df7-5040-4208-b0e2-2d935d7d1747 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Received event network-vif-deleted-ed266e98-9354-46dd-a173-6c25f605f719 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.378220] env[69982]: INFO nova.compute.manager [req-528156dd-0f4e-4943-be9a-a9792f256d35 
req-3e9f0df7-5040-4208-b0e2-2d935d7d1747 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Neutron deleted interface ed266e98-9354-46dd-a173-6c25f605f719; detaching it from the instance and deleting it from the info cache [ 1239.378366] env[69982]: DEBUG nova.network.neutron [req-528156dd-0f4e-4943-be9a-a9792f256d35 req-3e9f0df7-5040-4208-b0e2-2d935d7d1747 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.421961] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c6ba6b-6cfb-0b7d-3966-e29b2b15ed02, 'name': SearchDatastore_Task, 'duration_secs': 0.059509} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.423041] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.423041] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1239.423041] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.423041] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.423041] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1239.423634] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a580406c-7d91-4902-a961-e48bd1fab2ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.434226] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 
tempest-AttachVolumeNegativeTest-1055724446-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1239.434484] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1239.435311] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a078d6f9-6fac-43e4-b59e-4f86d4cb1a47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.443438] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1239.443438] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52eb056b-71fd-bef2-9679-cb83094eb4c7" [ 1239.443438] env[69982]: _type = "Task" [ 1239.443438] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.455280] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52eb056b-71fd-bef2-9679-cb83094eb4c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.510123] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865577, 'name': CloneVM_Task, 'duration_secs': 1.343706} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.510422] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Created linked-clone VM from snapshot [ 1239.511209] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3ff926d-45a7-45fa-ac1c-9cc271cc5521 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.520372] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Uploading image f2a59d2b-6bba-4229-910d-72dc8d0f9184 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1239.530164] env[69982]: DEBUG nova.compute.manager [req-a45ead72-e05e-4784-9a4d-acca53d5caaf req-8cf63670-e717-4e62-8c46-8741e929a8ca service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Received event network-vif-deleted-0d321236-4177-49d0-afe0-f203e679fb0a {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.530377] env[69982]: INFO nova.compute.manager [req-a45ead72-e05e-4784-9a4d-acca53d5caaf req-8cf63670-e717-4e62-8c46-8741e929a8ca service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Neutron deleted interface 0d321236-4177-49d0-afe0-f203e679fb0a; detaching it from the instance and deleting it from the info cache [ 1239.530591] env[69982]: DEBUG nova.network.neutron [req-a45ead72-e05e-4784-9a4d-acca53d5caaf req-8cf63670-e717-4e62-8c46-8741e929a8ca service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.550459] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1239.550459] env[69982]: value = "vm-768110" [ 1239.550459] env[69982]: _type = "VirtualMachine" [ 1239.550459] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1239.550732] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0723f373-7795-4a0d-8739-257bc7f7ef6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.558843] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease: (returnval){ [ 1239.558843] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d79d0b-f84e-72d1-e071-1f16bfbd16d4" [ 1239.558843] env[69982]: _type = "HttpNfcLease" [ 1239.558843] env[69982]: } obtained for exporting VM: (result){ [ 1239.558843] env[69982]: value = "vm-768110" [ 1239.558843] env[69982]: _type = "VirtualMachine" [ 1239.558843] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1239.559143] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the lease: (returnval){ [ 1239.559143] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d79d0b-f84e-72d1-e071-1f16bfbd16d4" [ 1239.559143] env[69982]: _type = "HttpNfcLease" [ 1239.559143] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1239.569504] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1239.569504] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d79d0b-f84e-72d1-e071-1f16bfbd16d4" [ 1239.569504] env[69982]: _type = "HttpNfcLease" [ 1239.569504] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1239.635218] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1cf48f-3be9-4475-81d7-548613972664 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.654961] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e49fa0-79b5-482c-bdf0-bd6aebf73917 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.673731] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 67 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1239.687642] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865585, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520598} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.688385] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d/4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1239.688385] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1239.688661] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ce14e25-f130-492d-b3eb-6e811dba6823 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.699334] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1239.699334] env[69982]: value = "task-3865587" [ 1239.699334] env[69982]: _type = "Task" [ 1239.699334] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.709153] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865587, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.822132] env[69982]: DEBUG nova.network.neutron [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.829795] env[69982]: DEBUG nova.network.neutron [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.877171] env[69982]: DEBUG nova.objects.instance [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1239.881440] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-528f8a03-d990-4485-ab7f-def7431ba100 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.891610] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33c2b72-9320-46cf-a5a8-3d61a0bb6098 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.930577] env[69982]: DEBUG nova.compute.manager [req-528156dd-0f4e-4943-be9a-a9792f256d35 req-3e9f0df7-5040-4208-b0e2-2d935d7d1747 service nova] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Detach interface failed, port_id=ed266e98-9354-46dd-a173-6c25f605f719, reason: Instance 9815a4f9-3827-4e83-b897-18edadcac55b could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1239.958933] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52eb056b-71fd-bef2-9679-cb83094eb4c7, 'name': SearchDatastore_Task, 'duration_secs': 0.0111} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.958933] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9abb034-f960-424b-8edc-0258ef735976 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.964569] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1239.964569] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52095034-c276-588f-f4c1-7d6ec8d763ee" [ 1239.964569] env[69982]: _type = "Task" [ 1239.964569] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.973464] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52095034-c276-588f-f4c1-7d6ec8d763ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.033762] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-846888f6-93c9-472f-aa16-2530a841f6dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.044958] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0c81d0-9043-410e-80ae-4fa75b74f0e7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.066759] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1240.066759] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d79d0b-f84e-72d1-e071-1f16bfbd16d4" [ 1240.066759] env[69982]: _type = "HttpNfcLease" [ 1240.066759] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1240.077582] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1240.077582] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52d79d0b-f84e-72d1-e071-1f16bfbd16d4" [ 1240.077582] env[69982]: _type = "HttpNfcLease" [ 1240.077582] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1240.077723] env[69982]: DEBUG nova.compute.manager [req-a45ead72-e05e-4784-9a4d-acca53d5caaf req-8cf63670-e717-4e62-8c46-8741e929a8ca service nova] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Detach interface failed, port_id=0d321236-4177-49d0-afe0-f203e679fb0a, reason: Instance 8089e191-85df-46cd-8a6b-415bfd5d6748 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1240.078580] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515dd391-4a11-40de-a62a-7653cb22bcca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.086966] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1240.087180] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk for reading. 
{{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1240.176467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-98d794ce-a2ee-4e64-8f2b-46e0c73669fb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.215652] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072608} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.215968] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1240.216848] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311a1bf6-016a-43ef-bfd3-1e68da922a46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.222820] env[69982]: DEBUG nova.network.neutron [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Port 98b299b9-3425-43e1-95bf-4acd909b7ad4 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1240.244298] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d/4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.245446] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-110c9cd5-c555-40f0-b405-b187e7f37904 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.267514] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1240.267514] env[69982]: value = "task-3865588" [ 1240.267514] env[69982]: _type = "Task" [ 1240.267514] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.276967] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865588, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.325281] env[69982]: INFO nova.compute.manager [-] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Took 1.57 seconds to deallocate network for instance. [ 1240.332775] env[69982]: INFO nova.compute.manager [-] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Took 1.59 seconds to deallocate network for instance. [ 1240.380053] env[69982]: DEBUG nova.objects.base [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance<36b7f89e-7552-40b9-ada4-01abfcea8310> lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1240.380358] env[69982]: DEBUG nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1240.458443] env[69982]: DEBUG nova.policy [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1240.476623] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52095034-c276-588f-f4c1-7d6ec8d763ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011191} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.476918] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1240.477245] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] cb226b65-d91f-4216-9844-37c22d3705a7/cb226b65-d91f-4216-9844-37c22d3705a7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1240.477559] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96c56d09-fdb9-482f-9631-f79379d92a06 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.486142] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1240.486142] env[69982]: value = "task-3865589" [ 1240.486142] env[69982]: _type = "Task" [ 1240.486142] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.496193] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.782731] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865588, 'name': ReconfigVM_Task, 'duration_secs': 0.331906} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.787026] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d/4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.787026] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-308cccac-9e9f-47c2-929f-e5532242e10c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.794959] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1240.794959] env[69982]: value = "task-3865590" [ 1240.794959] env[69982]: _type = "Task" [ 1240.794959] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.807730] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865590, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.883759] env[69982]: INFO nova.compute.manager [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Took 0.56 seconds to detach 1 volumes for instance. [ 1240.886825] env[69982]: INFO nova.compute.manager [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Took 0.55 seconds to detach 1 volumes for instance. [ 1240.889373] env[69982]: DEBUG nova.compute.manager [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Deleting volume: d6ce4976-1bf1-441f-8fbb-35dab72ed085 {{(pid=69982) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1240.999028] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865589, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.270646] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.271047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.271314] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.306314] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865590, 'name': Rename_Task, 'duration_secs': 0.197551} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.307015] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1241.307323] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df2131e9-f970-43ef-9304-e1c0184979cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.316763] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1241.316763] env[69982]: value = "task-3865592" [ 1241.316763] env[69982]: _type = "Task" [ 1241.316763] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.327302] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865592, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.398694] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.399012] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.399261] env[69982]: DEBUG nova.objects.instance [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'resources' on Instance uuid 8089e191-85df-46cd-8a6b-415bfd5d6748 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1241.448344] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.498567] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513415} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.499085] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] cb226b65-d91f-4216-9844-37c22d3705a7/cb226b65-d91f-4216-9844-37c22d3705a7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1241.499417] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1241.499795] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da740986-3676-4f84-8c4f-166f9b253ef3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.509101] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1241.509101] env[69982]: value = "task-3865593" [ 1241.509101] env[69982]: _type = "Task" [ 1241.509101] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.520646] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.829765] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865592, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.919567] env[69982]: DEBUG nova.compute.manager [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-plugged-138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1241.919965] env[69982]: DEBUG oslo_concurrency.lockutils [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.920346] env[69982]: DEBUG oslo_concurrency.lockutils [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.920609] env[69982]: DEBUG oslo_concurrency.lockutils [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.920791] env[69982]: DEBUG nova.compute.manager [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] No waiting events found dispatching network-vif-plugged-138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1241.921167] env[69982]: WARNING nova.compute.manager [req-99f48815-7a87-4dcd-a8ea-3b380b9ffd9b req-666bcfc3-efd4-4dff-bcf6-0725c461fe9b service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received unexpected event network-vif-plugged-138352cf-ac45-467a-99b2-d318216a2d53 for instance with vm_state active and task_state None. [ 1242.025890] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074281} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.026201] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1242.027073] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a03c34-e528-4b7c-94f3-55f29ca410b6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.033709] env[69982]: DEBUG nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Successfully updated port: 138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1242.060868] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] cb226b65-d91f-4216-9844-37c22d3705a7/cb226b65-d91f-4216-9844-37c22d3705a7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1242.064591] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be113759-12a3-4286-82b6-6ba4ce6fddec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.086618] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1242.086618] env[69982]: value = "task-3865594" [ 1242.086618] env[69982]: _type = "Task" [ 1242.086618] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.097011] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.120211] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7771255-c67d-4862-a80c-e2c14d2eb6d7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.129908] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85223db2-91c1-4728-9c13-2e5880739f0d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.166047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2fdd24-bce9-4459-82e9-d7304c36604a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.174774] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becacfd5-1a79-4b03-9796-47b71dfa2351 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.189904] env[69982]: DEBUG nova.compute.provider_tree [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1242.309569] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.309775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.309952] env[69982]: DEBUG nova.network.neutron [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1242.329528] env[69982]: DEBUG oslo_vmware.api [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865592, 'name': PowerOnVM_Task, 'duration_secs': 0.738903} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.329715] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1242.329896] env[69982]: INFO nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Took 10.62 seconds to spawn the instance on the hypervisor. [ 1242.330195] env[69982]: DEBUG nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1242.330985] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca67bbb-8f0c-46a7-ba2d-3d703c02d882 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.537903] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.538173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.538403] env[69982]: DEBUG nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1242.597894] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865594, 'name': ReconfigVM_Task, 'duration_secs': 0.43287} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.598209] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfigured VM instance instance-00000075 to attach disk [datastore1] cb226b65-d91f-4216-9844-37c22d3705a7/cb226b65-d91f-4216-9844-37c22d3705a7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.598879] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2de4e671-9d49-457b-a409-f12b4ed409ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.607506] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1242.607506] env[69982]: value = "task-3865595" [ 1242.607506] env[69982]: _type = "Task" [ 1242.607506] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.618032] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865595, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.694624] env[69982]: DEBUG nova.scheduler.client.report [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1242.853889] env[69982]: INFO nova.compute.manager [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Took 17.11 seconds to build instance. 
[ 1243.060967] env[69982]: DEBUG nova.network.neutron [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.083532] env[69982]: WARNING nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. ignoring it [ 1243.083752] env[69982]: WARNING nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. ignoring it [ 1243.117885] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865595, 'name': Rename_Task, 'duration_secs': 0.208884} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.120851] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1243.121190] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e323aaa8-50f6-4847-a904-33c71b967893 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.129924] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1243.129924] env[69982]: value = "task-3865596" [ 1243.129924] env[69982]: _type = "Task" [ 1243.129924] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.139695] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.201029] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.203187] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.755s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.203473] env[69982]: DEBUG nova.objects.instance [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'resources' on Instance uuid 9815a4f9-3827-4e83-b897-18edadcac55b {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.237120] env[69982]: INFO nova.scheduler.client.report [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted allocations for instance 8089e191-85df-46cd-8a6b-415bfd5d6748 [ 1243.356523] env[69982]: DEBUG oslo_concurrency.lockutils [None req-b887c866-ebd5-487b-8ea3-3832f9069356 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.619s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.356825] 
env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.199s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.357045] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] During sync_power_state the instance has a pending task (spawning). Skip. [ 1243.357230] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.563747] env[69982]: DEBUG oslo_concurrency.lockutils [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.643186] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.710847] env[69982]: DEBUG nova.network.neutron [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "address": "fa:16:3e:b0:d6:d7", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc8b183b-39", "ovs_interfaceid": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "138352cf-ac45-467a-99b2-d318216a2d53", "address": "fa:16:3e:d8:f2:7f", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap138352cf-ac", "ovs_interfaceid": "138352cf-ac45-467a-99b2-d318216a2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1243.746107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fa04e837-3293-41db-93ef-4807e4d061e2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "8089e191-85df-46cd-8a6b-415bfd5d6748" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.025s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.879241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8fa5c7-20f5-4862-a1c1-73332576c850 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.888811] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d564875-ca6a-4a67-b267-916d6f30e5f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.924895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b211c78e-5781-4ef4-b104-679068cbc30c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.935237] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791e6b4f-9d7c-41b6-a7d6-d57e9dc98d5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.951222] env[69982]: DEBUG nova.compute.provider_tree [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.090748] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231b90c8-745b-42fb-bd40-77c206f20b94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.111570] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c15889-dcab-439a-8d4b-c93619dea811 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.121316] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 83 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1244.142712] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.212774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1244.213525] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.213693] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.214587] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6f4752-5b77-4fc1-85fd-3d0d121a7b4b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.233830] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1244.234093] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1244.234260] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1244.234490] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1244.234670] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1244.234823] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1244.235051] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1244.235284] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1244.235386] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1244.235562] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1244.235720] env[69982]: DEBUG nova.virt.hardware [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1244.242400] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfiguring VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1244.242897] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d027d062-56cc-4ea6-bc1b-88d55fbaeace {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.262391] env[69982]: DEBUG oslo_vmware.api [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1244.262391] env[69982]: value = "task-3865597" [ 1244.262391] env[69982]: _type = "Task" [ 1244.262391] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.274168] env[69982]: DEBUG oslo_vmware.api [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865597, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.292167] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.292323] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.293579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.293579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.293579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.295525] env[69982]: INFO nova.compute.manager [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Terminating instance [ 1244.352175] env[69982]: DEBUG nova.compute.manager [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-changed-138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1244.352538] env[69982]: DEBUG nova.compute.manager [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] [instance: 
36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing instance network info cache due to event network-changed-138352cf-ac45-467a-99b2-d318216a2d53. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1244.352863] env[69982]: DEBUG oslo_concurrency.lockutils [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.353137] env[69982]: DEBUG oslo_concurrency.lockutils [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.353506] env[69982]: DEBUG nova.network.neutron [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Refreshing network info cache for port 138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1244.456855] env[69982]: DEBUG nova.scheduler.client.report [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.628306] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1244.628716] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-935aadb2-9012-4abe-a333-690ffde1123f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.638595] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1244.638595] env[69982]: value = "task-3865598" [ 1244.638595] env[69982]: _type = "Task" [ 1244.638595] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.645736] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.651820] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865598, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.776608] env[69982]: DEBUG oslo_vmware.api [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865597, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.799968] env[69982]: DEBUG nova.compute.manager [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1244.800273] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1244.801181] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459bd532-5232-4cdb-89aa-a415664d5ba7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.810296] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1244.810641] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-febd9535-be80-4cb4-99bc-73bd20ca0468 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.819573] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1244.819573] env[69982]: value = "task-3865599" [ 1244.819573] env[69982]: _type = "Task" [ 1244.819573] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.830114] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865599, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.913950] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.918697] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.963805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.001333] env[69982]: INFO nova.scheduler.client.report [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocations for instance 9815a4f9-3827-4e83-b897-18edadcac55b [ 1245.148959] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.156392] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865598, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.223193] env[69982]: DEBUG nova.network.neutron [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updated VIF entry in instance network info cache for port 138352cf-ac45-467a-99b2-d318216a2d53. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1245.223703] env[69982]: DEBUG nova.network.neutron [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "address": "fa:16:3e:b0:d6:d7", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc8b183b-39", "ovs_interfaceid": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "138352cf-ac45-467a-99b2-d318216a2d53", "address": "fa:16:3e:d8:f2:7f", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap138352cf-ac", "ovs_interfaceid": "138352cf-ac45-467a-99b2-d318216a2d53", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.273846] env[69982]: DEBUG oslo_vmware.api [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865597, 'name': ReconfigVM_Task, 'duration_secs': 0.688869} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.284312] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1245.291941] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.292291] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfigured VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1245.329683] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865599, 'name': PowerOffVM_Task, 'duration_secs': 0.266647} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.329991] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.330195] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1245.330514] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89f848d3-927f-46ba-88e8-decdf50340ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.410699] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.410973] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.513312] env[69982]: DEBUG oslo_concurrency.lockutils [None req-e00997cb-e7b8-428d-a6d0-0d8b4d10ed9a tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "9815a4f9-3827-4e83-b897-18edadcac55b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.790s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.647271] env[69982]: DEBUG oslo_vmware.api [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865596, 'name': PowerOnVM_Task, 'duration_secs': 2.093237} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.647554] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1245.647760] env[69982]: INFO nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Took 9.57 seconds to spawn the instance on the hypervisor. [ 1245.647952] env[69982]: DEBUG nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.649167] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea75a11d-aded-4100-8ca8-28ad1967767f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.654753] env[69982]: DEBUG oslo_vmware.api [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865598, 'name': PowerOnVM_Task, 'duration_secs': 0.544278} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.655418] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1245.655638] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-2e0ae8ec-ac41-488f-9a41-c5298835b0e1 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance '54b91e61-1302-40e6-b928-fcca31cd9b3a' progress to 100 {{(pid=69982) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1245.726878] env[69982]: DEBUG oslo_concurrency.lockutils [req-1b7d9f06-cd79-4c80-a74e-4b49170aba0b req-8acb4d28-f1f9-45c3-9456-c1978c2caeb4 service nova] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.797597] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4567dc9-ffbf-4ebe-b169-4bac8fb453e3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-138352cf-ac45-467a-99b2-d318216a2d53" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.520s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.913396] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1245.963883] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.964290] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.964458] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.964720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.964911] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.967239] env[69982]: INFO nova.compute.manager [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Terminating instance [ 1246.175754] env[69982]: INFO nova.compute.manager [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Took 14.46 seconds to build instance. 
[ 1246.284428] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.284542] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1246.284681] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1246.446204] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.446204] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.447089] env[69982]: INFO nova.compute.claims [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1246.471642] env[69982]: DEBUG nova.compute.manager [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1246.474160] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.474160] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bde3adc-1260-4758-901b-41725c8d2636 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.483303] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.483919] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58d8f33c-0c64-4dfe-8e71-a6136bf2b38e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.492313] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1246.492313] env[69982]: value = "task-3865601" [ 1246.492313] env[69982]: _type = "Task" [ 1246.492313] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.504050] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865601, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.681038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-494f0f6e-779a-4d4a-b582-6bf94a758f36 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.712s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.681038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.521s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.681038] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] During sync_power_state the instance has a pending task (networking). Skip. 
[ 1246.681038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.912900] env[69982]: DEBUG nova.compute.manager [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Received event network-changed-d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1246.913107] env[69982]: DEBUG nova.compute.manager [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Refreshing instance network info cache due to event network-changed-d4331d9d-de6b-48c6-9e45-c68bc45f7209. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1246.913327] env[69982]: DEBUG oslo_concurrency.lockutils [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] Acquiring lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.913474] env[69982]: DEBUG oslo_concurrency.lockutils [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] Acquired lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.913648] env[69982]: DEBUG nova.network.neutron [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Refreshing network info cache for port d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1247.006519] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865601, 'name': PowerOffVM_Task, 'duration_secs': 0.245271} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.007077] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.007077] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1247.007243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d849b577-e651-4972-96f7-af87de3453b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.084742] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.085022] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.085646] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore1] 92f898e2-0dfd-45ed-b74b-958f6c5af844 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.086239] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e129e934-6d9b-4b98-a0a7-4feca62e1c00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.096081] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1247.096081] env[69982]: value = "task-3865603" [ 1247.096081] env[69982]: _type = "Task" [ 1247.096081] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.101964] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.101964] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.101964] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleting the datastore file [datastore1] 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.102661] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b66e1cf-a232-408f-bf99-2628b39b79f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.109223] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865603, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.115130] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for the task: (returnval){ [ 1247.115130] env[69982]: value = "task-3865604" [ 1247.115130] env[69982]: _type = "Task" [ 1247.115130] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.124668] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865604, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.280610] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1247.284603] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1247.610646] env[69982]: DEBUG oslo_vmware.api [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180332} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.611028] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.611151] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.611557] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.611746] env[69982]: INFO nova.compute.manager [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1247.612144] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1247.612233] env[69982]: DEBUG nova.compute.manager [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1247.612359] env[69982]: DEBUG nova.network.neutron [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1247.629900] env[69982]: DEBUG oslo_vmware.api [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Task: {'id': task-3865604, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186821} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.629900] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.629900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.629900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.629900] env[69982]: INFO nova.compute.manager [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Took 2.83 seconds to destroy the instance on the hypervisor. [ 1247.629900] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1247.629900] env[69982]: DEBUG nova.compute.manager [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1247.629900] env[69982]: DEBUG nova.network.neutron [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1247.649575] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae22a126-dadf-456e-9553-05131c641f22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.658203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a070322c-f451-4dca-9332-651c8d9a04dd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.695127] env[69982]: DEBUG nova.network.neutron [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updated VIF entry in instance network info cache for port d4331d9d-de6b-48c6-9e45-c68bc45f7209. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1247.695564] env[69982]: DEBUG nova.network.neutron [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updating instance_info_cache with network_info: [{"id": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "address": "fa:16:3e:2e:4f:6f", "network": {"id": "92a81d91-6b25-496f-84be-4577d8b320f6", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1836897930-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "afaf89b0250d46048813da25c754e1a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a91c3a96-63d0-407c-bcde-c3d5b58d9cb2", "external-id": "nsx-vlan-transportzone-170", "segmentation_id": 170, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4331d9d-de", "ovs_interfaceid": "d4331d9d-de6b-48c6-9e45-c68bc45f7209", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.701023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c31a2c-2657-4fb7-92fd-927ba3fab16b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.707589] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32aaa77a-8163-4e59-8810-4e136a38f895 {{(pid=69982) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.712931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-fc8b183b-39e0-493b-951b-cb7e8c614a94" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.713191] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-fc8b183b-39e0-493b-951b-cb7e8c614a94" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.728608] env[69982]: DEBUG nova.compute.provider_tree [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.126401] env[69982]: DEBUG nova.network.neutron [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Port 98b299b9-3425-43e1-95bf-4acd909b7ad4 binding to destination host cpu-1 is already ACTIVE {{(pid=69982) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3252}} [ 1248.126401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.126401] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.126401] env[69982]: DEBUG nova.network.neutron [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1248.139388] env[69982]: DEBUG nova.compute.manager [req-d7244f59-1854-43ae-bce1-dd00ee78ae0e req-c3e36596-26c8-4227-ac8a-a6337d0dac0a service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Received event network-vif-deleted-9292dad6-1ffe-4506-ba58-fb92f9a98323 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1248.139388] env[69982]: INFO nova.compute.manager [req-d7244f59-1854-43ae-bce1-dd00ee78ae0e req-c3e36596-26c8-4227-ac8a-a6337d0dac0a service nova] [instance: 
92f898e2-0dfd-45ed-b74b-958f6c5af844] Neutron deleted interface 9292dad6-1ffe-4506-ba58-fb92f9a98323; detaching it from the instance and deleting it from the info cache [ 1248.139481] env[69982]: DEBUG nova.network.neutron [req-d7244f59-1854-43ae-bce1-dd00ee78ae0e req-c3e36596-26c8-4227-ac8a-a6337d0dac0a service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.201711] env[69982]: DEBUG oslo_concurrency.lockutils [req-641f24e5-ef26-4076-b4c1-866d6f044de7 req-d775b906-4f7f-4fd6-bf8f-4867678cca89 service nova] Releasing lock "refresh_cache-cb226b65-d91f-4216-9844-37c22d3705a7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.217448] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1248.217697] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1248.218730] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba740360-8f87-41da-b173-49ff17bf0cd2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.240151] env[69982]: DEBUG nova.scheduler.client.report [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1248.244230] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a986c9bd-a101-4f50-8727-2437604cd750 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.279640] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfiguring VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1248.281034] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da192239-f403-4d63-823c-d34a2eabb1b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.297858] env[69982]: DEBUG 
oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1248.307031] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1248.307031] env[69982]: value = "task-3865605" [ 1248.307031] env[69982]: _type = "Task" [ 1248.307031] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.317977] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.609646] env[69982]: DEBUG nova.network.neutron [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.642075] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59dcc0fc-b182-4b3d-be3b-25c3aacf2174 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.653411] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437975f3-e695-47e0-9548-8d17347ac36e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.689513] env[69982]: DEBUG nova.compute.manager [req-d7244f59-1854-43ae-bce1-dd00ee78ae0e req-c3e36596-26c8-4227-ac8a-a6337d0dac0a service nova] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Detach interface failed, port_id=9292dad6-1ffe-4506-ba58-fb92f9a98323, reason: Instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1248.749036] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.749592] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1248.800744] env[69982]: DEBUG nova.network.neutron [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.802737] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.802993] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.803176] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.803330] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1248.807176] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22f13f9-d8bd-43ee-ad29-be98f1dfd64e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.827526] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7a2557-c490-42e9-b734-03610998daf6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.831547] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.845140] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f56bf22-c2b4-4e3e-9c42-03736f2609d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.854730] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0647c84e-d187-41fb-9a7b-7e4097b231eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.885576] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179259MB free_disk=45GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1248.885756] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1248.885990] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.888566] env[69982]: DEBUG nova.network.neutron [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.112911] env[69982]: INFO nova.compute.manager [-] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Took 1.50 seconds to deallocate network for instance. 
[ 1249.258807] env[69982]: DEBUG nova.compute.utils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1249.260813] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1249.261088] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1249.267314] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1249.268597] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd56a9f-43ac-4729-bf69-ceb10c31115b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.277777] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1249.277950] env[69982]: ERROR oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk due to incomplete transfer. [ 1249.278208] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-215ad692-eaed-4215-b67a-56c89ac6296e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.286674] env[69982]: DEBUG oslo_vmware.rw_handles [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/525c61b5-a03c-1aee-3111-d8dc23c7e7bc/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1249.286862] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Uploaded image f2a59d2b-6bba-4229-910d-72dc8d0f9184 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1249.288973] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1249.289249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8503975f-93d9-4b31-9435-707aa9485cf9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.295637] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1249.295637] env[69982]: value = "task-3865606" [ 1249.295637] env[69982]: _type = "Task" [ 1249.295637] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.304125] env[69982]: INFO nova.compute.manager [-] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Took 1.67 seconds to deallocate network for instance. [ 1249.304460] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865606, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.306011] env[69982]: DEBUG nova.policy [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '64533b0ad8894d41bdf9fe921b440063', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '820dcd8333cb4a678ef562e4150518d2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1249.325455] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.396750] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.620229] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.635697] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Successfully created port: d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1249.768022] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1249.809597] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865606, 'name': Destroy_Task, 'duration_secs': 0.33952} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.810168] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Destroyed the VM [ 1249.810426] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1249.810694] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7e8e9422-0823-498d-8be8-a2d04df289d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.813914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.821676] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1249.821676] env[69982]: value = "task-3865607" [ 1249.821676] env[69982]: _type = "Task" [ 1249.821676] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.830392] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.835674] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865607, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.899653] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Applying migration context for instance 54b91e61-1302-40e6-b928-fcca31cd9b3a as it has an incoming, in-progress migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae. 
Migration status is reverting {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1249.901040] env[69982]: INFO nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating resource usage from migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae [ 1249.904081] env[69982]: DEBUG nova.compute.manager [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69982) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1249.904318] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.924201] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924201] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924201] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ce24e165-230a-44bb-ae46-d1479e71585a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924201] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924201] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924502] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924502] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1249.924588] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 54b91e61-1302-40e6-b928-fcca31cd9b3a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.924727] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance cb226b65-d91f-4216-9844-37c22d3705a7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.925155] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6c20ba60-6552-47f3-8eb3-a71923a0a68f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1249.925155] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1249.925289] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1250.072038] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace47c7c-7d2a-4121-956a-c06c2de87d9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.080954] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f6bdec-a959-4b9c-876b-b429ac4ee398 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.113186] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979989bf-8810-4f47-9b03-505638aae341 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.122196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25aad8d5-b435-4902-ba6b-1fc12b842c4c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.136803] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.166128] env[69982]: DEBUG nova.compute.manager [req-8cf46e1a-fced-479a-9ee1-6dd958bd6df2 req-4c3aab24-07b5-4abc-9223-b5cd30f83461 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-vif-deleted-c28443e0-917c-4e53-b3b2-f3667e0c96e4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.166363] env[69982]: DEBUG nova.compute.manager [req-8cf46e1a-fced-479a-9ee1-6dd958bd6df2 req-4c3aab24-07b5-4abc-9223-b5cd30f83461 service nova] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Received event network-vif-deleted-af76a056-445a-4517-a3bd-559946857a62 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.330054] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.336034] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865607, 'name': RemoveSnapshot_Task, 'duration_secs': 0.369794} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.336158] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1250.336374] env[69982]: DEBUG nova.compute.manager [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1250.337156] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb818443-da6e-4ece-aba8-746ce3f64050 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.639773] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1250.777667] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1250.805378] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1250.805638] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1250.805795] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1250.805972] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1250.806130] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1250.806278] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1250.806485] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1250.806643] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1250.806941] env[69982]: DEBUG 
nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1250.807039] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1250.807153] env[69982]: DEBUG nova.virt.hardware [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1250.808037] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f67f5f0-9c79-4078-b6d9-1fd504d81de0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.816735] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b900b5a7-64a2-4a9c-b924-6f3a7bb15978 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.829569] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.849438] env[69982]: INFO nova.compute.manager [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Shelve offloading [ 1251.136118] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Successfully updated port: d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1251.146101] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1251.146101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.259s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.146101] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.526s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.146101] env[69982]: DEBUG nova.objects.instance [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'resources' on Instance uuid 92f898e2-0dfd-45ed-b74b-958f6c5af844 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1251.331663] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.353473] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1251.353861] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-81cac5e3-0677-4ace-8347-b2e59175b5d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.361669] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1251.361669] env[69982]: value = "task-3865608" [ 1251.361669] env[69982]: _type = "Task" [ 1251.361669] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.371467] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1251.371695] env[69982]: DEBUG nova.compute.manager [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1251.372485] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d63092f-1769-4f3f-b983-795e0398ec57 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.378509] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.378681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.378847] env[69982]: DEBUG nova.network.neutron [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1251.640304] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.640593] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.640839] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1251.788279] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc008008-06c1-42cb-9f37-004c0f696022 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.798563] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d264416-4761-442c-89db-b3cb8cb6a69b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.833736] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f179b6fe-8aec-479f-8a41-3705acefcc58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.845269] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50796680-6045-49a5-8925-18aacbe34941 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.849235] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.859543] env[69982]: DEBUG nova.compute.provider_tree [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1252.109253] env[69982]: DEBUG nova.network.neutron [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.186461] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1252.191826] env[69982]: DEBUG nova.compute.manager [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1252.192141] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.192375] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.192546] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.192715] env[69982]: DEBUG nova.compute.manager [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] No waiting events found dispatching network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1252.192880] env[69982]: WARNING nova.compute.manager [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received unexpected event network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d for instance with vm_state building and task_state spawning. [ 1252.193100] env[69982]: DEBUG nova.compute.manager [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1252.193303] env[69982]: DEBUG nova.compute.manager [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing instance network info cache due to event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1252.193473] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.340333] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.362706] env[69982]: DEBUG nova.scheduler.client.report [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1252.371057] env[69982]: DEBUG nova.network.neutron [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.612476] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.840868] env[69982]: DEBUG oslo_vmware.api 
[None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.874084] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.728s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.876506] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.876871] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance network_info: |[{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1252.877412] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.064s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1252.877679] env[69982]: DEBUG nova.objects.instance [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lazy-loading 'resources' on Instance uuid 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.878761] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 
req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.878995] env[69982]: DEBUG nova.network.neutron [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1252.880067] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:ed:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd95c9544-b6cb-4f15-adb5-ef91c7ef325d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1252.889225] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1252.893304] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1252.894865] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14cbe237-a0b5-4fdb-9184-fb6444869b98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.911077] env[69982]: INFO nova.scheduler.client.report [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocations for instance 92f898e2-0dfd-45ed-b74b-958f6c5af844 [ 1252.923086] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.923086] env[69982]: value = "task-3865609" [ 1252.923086] env[69982]: _type = "Task" [ 1252.923086] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.933979] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865609, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.953629] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.954788] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d665409-de02-4e1e-914f-e41e3c0b6173 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.967786] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.968101] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b6c9b30-2f2c-4be7-9a4e-56227903a3d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.040889] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1253.040889] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1253.040889] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1253.041357] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1f9f4ff-85ce-47c8-9057-b1e9f2a59ac0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.045559] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f9f7223-2ee1-4e06-9ee3-5d6368769de8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.053523] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1253.053523] env[69982]: value = "task-3865611" [ 1253.053523] env[69982]: _type = "Task" [ 1253.053523] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.054927] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b0f49d4-9c2b-4955-9e0c-798b45bb62a9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.092672] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b24c3a-d8b1-4688-9a2e-710f0b976bc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.100983] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3554e593-2e2a-41f0-82e1-87f57570a710 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.117888] env[69982]: DEBUG nova.compute.provider_tree [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.342197] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.421443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-833782c5-4e4c-4edf-8f68-53f9495030eb tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "92f898e2-0dfd-45ed-b74b-958f6c5af844" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.457s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1253.435265] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865609, 'name': CreateVM_Task, 'duration_secs': 0.332726} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.435965] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1253.436038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.436161] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.437065] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1253.437065] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78c68335-436b-4a65-b7de-fa6f4ec84975 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.442612] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1253.442612] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5298573a-d589-42a2-2f58-397bbff52649" [ 1253.442612] env[69982]: _type = "Task" [ 1253.442612] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.451645] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5298573a-d589-42a2-2f58-397bbff52649, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.572643] env[69982]: DEBUG oslo_vmware.api [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158824} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.572979] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.573254] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.573449] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.614429] env[69982]: INFO nova.scheduler.client.report [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted allocations for instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f [ 1253.620615] env[69982]: DEBUG nova.scheduler.client.report [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1253.624238] env[69982]: DEBUG nova.network.neutron [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updated VIF entry in instance network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1253.624604] env[69982]: DEBUG nova.network.neutron [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.841533] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.954538] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5298573a-d589-42a2-2f58-397bbff52649, 'name': SearchDatastore_Task, 'duration_secs': 0.014733} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.954875] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.955208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1253.955443] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1253.955628] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1253.955850] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1253.956166] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31118fed-280f-4a41-a3f4-2b6773417aa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.965567] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1253.965733] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1253.966467] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fa6566d-239f-4769-a28e-f910caa288c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.972599] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1253.972599] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524aefa8-2ad1-6dac-710f-4a3739023163" [ 1253.972599] env[69982]: _type = "Task" [ 1253.972599] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.981488] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524aefa8-2ad1-6dac-710f-4a3739023163, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.120138] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.127151] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.250s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.129562] env[69982]: DEBUG oslo_concurrency.lockutils [req-d87a98b5-7b8a-4700-b234-8e30749a7748 req-05c77d26-008f-43b9-8591-5c8453b86bf0 service nova] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.130169] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 4.226s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.151496] env[69982]: INFO nova.scheduler.client.report [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Deleted allocations for instance 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d [ 1254.231709] env[69982]: DEBUG nova.compute.manager [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-vif-unplugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1254.231933] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.232148] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.232323] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.232826] env[69982]: DEBUG nova.compute.manager [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] No waiting events found dispatching network-vif-unplugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1254.233369] env[69982]: WARNING nova.compute.manager [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received unexpected event network-vif-unplugged-f973e93a-f258-4ccd-a732-c323a3202bb3 for instance with vm_state shelved_offloaded and task_state None. [ 1254.233816] env[69982]: DEBUG nova.compute.manager [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1254.234146] env[69982]: DEBUG nova.compute.manager [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing instance network info cache due to event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1254.234474] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.234799] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.235106] env[69982]: DEBUG nova.network.neutron [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1254.346077] env[69982]: DEBUG oslo_vmware.api [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865605, 'name': ReconfigVM_Task, 'duration_secs': 5.756535} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.346546] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.346546] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfigured VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1254.486786] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524aefa8-2ad1-6dac-710f-4a3739023163, 'name': SearchDatastore_Task, 'duration_secs': 0.009246} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.487779] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cdc8720-f257-47a2-bfef-14bab03de755 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.494056] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1254.494056] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d67b6-fbf1-e945-3f61-5ed4b6a7303d" [ 1254.494056] env[69982]: _type = "Task" [ 1254.494056] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.504259] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d67b6-fbf1-e945-3f61-5ed4b6a7303d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.633787] env[69982]: DEBUG nova.objects.instance [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'migration_context' on Instance uuid 54b91e61-1302-40e6-b928-fcca31cd9b3a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1254.659680] env[69982]: DEBUG oslo_concurrency.lockutils [None req-6bdc3e59-31bd-419e-8c89-86800d3e8800 tempest-ServersTestMultiNic-425894379 tempest-ServersTestMultiNic-425894379-project-member] Lock "4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.367s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.009039] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]528d67b6-fbf1-e945-3f61-5ed4b6a7303d, 'name': SearchDatastore_Task, 'duration_secs': 0.03358} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.009039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.009039] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1255.009039] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-45d81551-fdb4-4070-b68d-b460098a22a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.020205] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1255.020205] env[69982]: value = "task-3865613" [ 1255.020205] env[69982]: _type = "Task" [ 1255.020205] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.029452] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.243935] env[69982]: DEBUG nova.compute.manager [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-deleted-138352cf-ac45-467a-99b2-d318216a2d53 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1255.244239] env[69982]: INFO nova.compute.manager [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Neutron deleted interface 138352cf-ac45-467a-99b2-d318216a2d53; detaching it from the instance and deleting it from the info cache [ 1255.244647] env[69982]: DEBUG nova.network.neutron [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "address": "fa:16:3e:b0:d6:d7", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", 
"segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfc8b183b-39", "ovs_interfaceid": "fc8b183b-39e0-493b-951b-cb7e8c614a94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.278397] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a6b083-e5db-4ace-9283-386511c3c68b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.288444] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6857356-8fdb-42dd-9da1-5336898d4f03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.336478] env[69982]: DEBUG nova.network.neutron [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updated VIF entry in instance network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1255.337094] env[69982]: DEBUG nova.network.neutron [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf973e93a-f2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.341619] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc9afed-85ba-4bd6-963f-7bfde8ff62e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.354565] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3854197e-46f8-4c73-bb55-8c647bdda9c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.374114] env[69982]: DEBUG nova.compute.provider_tree [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1255.533260] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865613, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.660933] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.661234] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.661469] env[69982]: DEBUG nova.network.neutron [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1255.748128] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1255.748323] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Acquired lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1255.749374] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77906069-efb6-4fc2-b5b8-b158b260033d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.770494] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f01924f-43a1-4f19-8bcc-3719ed2031dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.799162] env[69982]: DEBUG nova.virt.vmwareapi.vmops [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfiguring VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1255.799510] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-0262bbaa-19e6-4207-b15e-558ac6a4db97 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.821563] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Waiting for the task: (returnval){ [ 1255.821563] env[69982]: value = "task-3865614" [ 1255.821563] env[69982]: _type = "Task" [ 1255.821563] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.833923] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.848911] env[69982]: DEBUG oslo_concurrency.lockutils [req-e4c1ef8c-127c-4f1a-a745-ed05c650b6ab req-4cf5f8ca-83df-4f72-bc83-7a579cad23f4 service nova] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1255.849578] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.899902] env[69982]: ERROR nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [req-ecb24ce7-9060-428b-a909-41f096bbcc1d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ecb24ce7-9060-428b-a909-41f096bbcc1d"}]} [ 1255.919997] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1255.937855] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1255.938081] env[69982]: DEBUG nova.compute.provider_tree [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1255.950340] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1255.972967] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1256.029944] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79794} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.032712] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1256.032948] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1256.033446] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0cf061f-ad50-4ca0-9e24-59c1fa685cd5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.041418] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1256.041418] env[69982]: value = "task-3865615" [ 1256.041418] env[69982]: _type = "Task" [ 1256.041418] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.053485] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865615, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.099316] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f51eb9e-dd3e-4ba9-b839-40e7dfb5f7ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.108792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bbb9c1-75a1-4a61-905e-fc1584643a9e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.141310] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f32ebad-42d6-4b28-b1c3-acd372922ce3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.150724] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36382fce-6a10-475c-8ae8-78e2cd064e66 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.155641] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.166680] env[69982]: DEBUG nova.compute.provider_tree [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1256.267107] env[69982]: DEBUG nova.compute.manager [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-deleted-fc8b183b-39e0-493b-951b-cb7e8c614a94 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1256.267329] env[69982]: INFO nova.compute.manager [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Neutron deleted interface fc8b183b-39e0-493b-951b-cb7e8c614a94; detaching it from the instance and deleting it from the info cache [ 1256.267630] env[69982]: DEBUG nova.network.neutron [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.332844] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.566584] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865615, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079823} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.567670] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1256.568556] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719aea49-2e1b-4a9b-a545-a74775098be0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.597626] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1256.598455] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5e716d7-d1cd-4c4a-93c4-a9517fab8b2d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.620199] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1256.620199] env[69982]: value = 
"task-3865616" [ 1256.620199] env[69982]: _type = "Task" [ 1256.620199] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.628992] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865616, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.654578] env[69982]: INFO nova.network.neutron [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Port fc8b183b-39e0-493b-951b-cb7e8c614a94 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1256.655132] env[69982]: DEBUG nova.network.neutron [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [{"id": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "address": "fa:16:3e:be:65:d9", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18ad43f6-28", "ovs_interfaceid": "18ad43f6-284f-4ffe-8c0c-638aa5dc1be9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.701299] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 168 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1256.701567] env[69982]: DEBUG nova.compute.provider_tree [None req-10d0c072-c272-4541-85b4-5400eea63f94 
tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 168 to 169 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1256.701748] env[69982]: DEBUG nova.compute.provider_tree [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1256.770689] env[69982]: DEBUG oslo_concurrency.lockutils [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.833793] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.050594] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.050881] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.051114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.051306] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.051479] env[69982]: 
DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.053874] env[69982]: INFO nova.compute.manager [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Terminating instance [ 1257.130404] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865616, 'name': ReconfigVM_Task, 'duration_secs': 0.31433} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.130697] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1257.131673] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7495ecec-9005-4d41-a843-f218c5cb8ae0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.138713] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1257.138713] env[69982]: value = "task-3865617" [ 1257.138713] env[69982]: _type = "Task" [ 1257.138713] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.147469] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865617, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.158402] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.333167] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.561066] env[69982]: DEBUG nova.compute.manager [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1257.561066] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1257.561066] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367a4a7e-9683-41d1-90ec-9977dd6351fd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.568517] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1257.568892] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2e435f51-7941-4956-81c1-5ca971d1f68a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.575208] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1257.575208] env[69982]: value = "task-3865618" [ 1257.575208] env[69982]: _type = "Task" [ 1257.575208] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.583917] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865618, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.653349] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865617, 'name': Rename_Task, 'duration_secs': 0.180297} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.653349] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1257.653349] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7c16dec-3f4d-49b5-9728-51876cdf1c72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.663864] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bf2d8bdd-cb05-4e04-bc0c-987e188a43b6 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-36b7f89e-7552-40b9-ada4-01abfcea8310-fc8b183b-39e0-493b-951b-cb7e8c614a94" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.951s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.665103] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1257.665103] env[69982]: value = "task-3865619" [ 1257.665103] env[69982]: _type = "Task" [ 1257.665103] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.675186] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865619, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.714681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.584s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.724539] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.604s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.725035] env[69982]: DEBUG nova.objects.instance [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'resources' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1257.834606] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.086836] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865618, 'name': PowerOffVM_Task, 'duration_secs': 0.197383} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.087221] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1258.087406] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1258.087678] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25973907-5a82-4590-ac02-dbadee55a3e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.154915] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1258.155169] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1258.155356] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleting the datastore file [datastore2] 31f56d0e-7c64-4fe3-917e-7ebb814ae924 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1258.155677] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a36968c-06f6-4a05-8a9e-68bc0cc2f071 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.163066] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for the task: (returnval){ [ 1258.163066] env[69982]: value = "task-3865621" [ 1258.163066] env[69982]: _type = "Task" [ 1258.163066] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.175341] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865621, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.178874] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865619, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.227901] env[69982]: DEBUG nova.objects.instance [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'numa_topology' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1258.335197] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.681757] env[69982]: DEBUG oslo_vmware.api [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Task: {'id': task-3865621, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139294} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.684751] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1258.684962] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1258.685165] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1258.685412] env[69982]: INFO nova.compute.manager [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1258.685686] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1258.685892] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865619, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.686124] env[69982]: DEBUG nova.compute.manager [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1258.686222] env[69982]: DEBUG nova.network.neutron [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1258.730740] env[69982]: DEBUG nova.objects.base [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Object Instance<7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f> lazy-loaded attributes: resources,numa_topology {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1258.838099] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.871222] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1954d471-6e2d-4a95-bac3-453ade344d00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.879546] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb723c1-7bad-481e-9537-c8870ab7b7d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.910981] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69cd3fe-04ca-48d2-9900-4fb2977ad0b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.925039] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9b587b-1cca-4faa-8a0b-5306a42377b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.939249] env[69982]: DEBUG nova.compute.provider_tree [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1258.975460] env[69982]: DEBUG nova.compute.manager [req-7d8d0b7d-e2a5-40d0-a9aa-28feabb17f5e req-cf0b0f4d-8f14-4cb8-bbcc-84072c0202b0 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Received event network-vif-deleted-ff8d3aec-2392-4a4a-80c2-aa0499153235 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1258.975460] env[69982]: INFO nova.compute.manager [req-7d8d0b7d-e2a5-40d0-a9aa-28feabb17f5e req-cf0b0f4d-8f14-4cb8-bbcc-84072c0202b0 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Neutron deleted interface ff8d3aec-2392-4a4a-80c2-aa0499153235; detaching it from the instance and deleting it from the info cache [ 1258.975460] env[69982]: DEBUG nova.network.neutron [req-7d8d0b7d-e2a5-40d0-a9aa-28feabb17f5e req-cf0b0f4d-8f14-4cb8-bbcc-84072c0202b0 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.181287] env[69982]: DEBUG oslo_vmware.api [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865619, 'name': PowerOnVM_Task, 'duration_secs': 1.466071} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.181591] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.181824] env[69982]: INFO nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Took 8.40 seconds to spawn the instance on the hypervisor. [ 1259.182059] env[69982]: DEBUG nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1259.183199] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a0ad666-ad12-494d-8316-996809819944 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.267466] env[69982]: INFO nova.compute.manager [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Swapping old allocation on dict_keys(['206a5498-2e79-46c1-a636-9488a05fb67d']) held by migration 5dfa3168-4529-4db8-94b0-350d18c0b2ae for instance [ 1259.290812] env[69982]: DEBUG nova.scheduler.client.report [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Overwriting current allocation {'allocations': {'206a5498-2e79-46c1-a636-9488a05fb67d': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 169}}, 'project_id': 'efe64e69253d49a6a1146f240506ce39', 'user_id': 'd08d17e3ac5f40c6890dc8dcc4c559d3', 'consumer_generation': 1} on consumer 54b91e61-1302-40e6-b928-fcca31cd9b3a {{(pid=69982) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1259.337057] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.367759] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.367968] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.368162] env[69982]: DEBUG nova.network.neutron [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1259.442778] env[69982]: DEBUG nova.scheduler.client.report [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1259.449684] env[69982]: DEBUG nova.network.neutron [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.479280] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5be5f6df-ca38-493e-bd09-4bf4b90d00ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.489460] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b805187c-53a6-475b-b73f-d40f078cb1db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.519611] env[69982]: DEBUG nova.compute.manager [req-7d8d0b7d-e2a5-40d0-a9aa-28feabb17f5e req-cf0b0f4d-8f14-4cb8-bbcc-84072c0202b0 service nova] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Detach interface failed, port_id=ff8d3aec-2392-4a4a-80c2-aa0499153235, reason: Instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1259.700990] env[69982]: INFO nova.compute.manager [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Took 13.28 seconds to build instance. 
[ 1259.838126] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.948280] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.952183] env[69982]: INFO nova.compute.manager [-] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Took 1.27 seconds to deallocate network for instance. [ 1260.156161] env[69982]: DEBUG nova.network.neutron [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [{"id": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "address": "fa:16:3e:d9:d2:de", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98b299b9-34", "ovs_interfaceid": "98b299b9-3425-43e1-95bf-4acd909b7ad4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.201970] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f5b569a-8c69-4fb6-8fa5-0f26765da925 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.791s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.338702] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.440974] env[69982]: DEBUG nova.compute.manager [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1260.441189] env[69982]: DEBUG nova.compute.manager [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing instance network info cache due to event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1260.441409] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1260.441552] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1260.441716] env[69982]: DEBUG nova.network.neutron [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1260.457594] env[69982]: DEBUG oslo_concurrency.lockutils [None req-da108b39-fcf5-4921-9be5-a5ec30e142ef tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.111s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.459052] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.459193] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.459393] env[69982]: DEBUG nova.objects.instance [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lazy-loading 'resources' on Instance uuid 31f56d0e-7c64-4fe3-917e-7ebb814ae924 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.460192] 
env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 25.302s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.460375] env[69982]: INFO nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] During sync_power_state the instance has a pending task (shelving). Skip. [ 1260.460547] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.461269] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.306s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.461440] env[69982]: INFO nova.compute.manager [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Unshelving [ 1260.658704] env[69982]: DEBUG oslo_concurrency.lockutils [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-54b91e61-1302-40e6-b928-fcca31cd9b3a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.659207] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.659501] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98dc2020-385a-4c8b-a265-c130f90b982b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.667728] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1260.667728] env[69982]: value = "task-3865622" [ 1260.667728] env[69982]: _type = "Task" [ 1260.667728] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.677377] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865622, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.838904] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.078043] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f562f89-01cc-474c-960d-0376607b946e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.086428] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b032eeac-974f-4dcd-ad6b-2a62d849ad99 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.120640] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c7fd117-994b-4efb-b5c3-13ae86a0e10b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.128834] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f990bdaf-3092-47c5-b6f5-8fc624b78275 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.142798] env[69982]: DEBUG nova.compute.provider_tree [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1261.179562] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865622, 'name': PowerOffVM_Task, 'duration_secs': 0.226952} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.179562] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.180088] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.180308] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.180463] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.180641] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.180786] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.180955] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.181138] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.181293] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 
tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.181452] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.181860] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.181860] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.189410] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ab97ee8-7a1e-47c1-a1d4-5bfdc3587c7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.206076] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1261.206076] env[69982]: value = "task-3865623" [ 1261.206076] env[69982]: _type = "Task" [ 1261.206076] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.214974] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.233775] env[69982]: DEBUG nova.network.neutron [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updated VIF entry in instance network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1261.234316] env[69982]: DEBUG nova.network.neutron [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.339834] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.473560] env[69982]: DEBUG nova.compute.utils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1261.646950] env[69982]: DEBUG nova.scheduler.client.report [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1261.716377] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865623, 'name': ReconfigVM_Task, 'duration_secs': 0.139418} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.717681] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824601df-c859-4717-bd84-2776f28f0c4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.737115] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1c25774-9b08-4c79-92c4-169b2ad95ee0 req-0b38fc87-752a-4ddf-bf89-806f749ec448 service nova] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.738061] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1261.738297] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1261.738453] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1261.738637] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1261.738783] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1261.738932] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1261.739154] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1261.739319] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1261.739500] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1261.739665] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1261.739858] env[69982]: DEBUG nova.virt.hardware [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1261.740676] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e03a6d36-1915-44f5-9441-98b038f7ffda {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.747210] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1261.747210] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a91cf-e63c-d365-27d5-3ddc0684c03b" [ 1261.747210] env[69982]: _type = "Task" [ 1261.747210] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.756139] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a91cf-e63c-d365-27d5-3ddc0684c03b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.839946] env[69982]: DEBUG oslo_vmware.api [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Task: {'id': task-3865614, 'name': ReconfigVM_Task, 'duration_secs': 5.769396} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.840302] env[69982]: DEBUG oslo_concurrency.lockutils [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] Releasing lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1261.840412] env[69982]: DEBUG nova.virt.vmwareapi.vmops [req-d5d9ca12-6ba9-4b05-9000-fdfa4ab7d7d4 req-8aea3fed-69aa-41b4-9b23-28e9aa92926a service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Reconfigured VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1261.840889] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.991s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.841145] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.841354] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.841520] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.843626] env[69982]: INFO nova.compute.manager [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Terminating instance [ 1261.977185] env[69982]: INFO nova.virt.block_device [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Booting with volume faf3518e-f73e-4b06-a226-42e4c81f1235 at /dev/sdb [ 1262.016133] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f11e8e5-34d9-4e0b-8342-d6b444c91566 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.026126] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4aacd1a-6449-4f14-b6f3-e58ac5cae3af {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.055470] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8f90a3bf-0004-4fd4-84c6-25f71b4c6c7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.065074] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36583bde-e07d-46d4-9430-a7eeaec4f766 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.096803] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-147f69d7-f575-45a4-ab66-d7eefdfc74e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.103548] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576c8c3c-9405-4d8d-9b32-4a913211a9f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.117836] env[69982]: DEBUG nova.virt.block_device [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating existing volume attachment record: f4066339-102a-4787-bcf5-5821d5a49157 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1262.151399] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.692s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.173090] env[69982]: INFO nova.scheduler.client.report [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Deleted allocations for instance 31f56d0e-7c64-4fe3-917e-7ebb814ae924 [ 1262.260124] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520a91cf-e63c-d365-27d5-3ddc0684c03b, 'name': SearchDatastore_Task, 'duration_secs': 0.008003} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.269406] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1262.269862] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d148a73-063b-46db-a164-f7908c75e3be {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.301099] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1262.301099] env[69982]: value = "task-3865625" [ 1262.301099] env[69982]: _type = "Task" [ 1262.301099] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.310824] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865625, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.347964] env[69982]: DEBUG nova.compute.manager [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1262.348521] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1262.349670] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b5f13e-164c-49f0-8d8d-2011d08c904f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.358426] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1262.358781] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2552ca98-2262-407a-83f0-b4ab6d80cd30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.365656] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1262.365656] env[69982]: value = "task-3865626" [ 1262.365656] env[69982]: _type = "Task" [ 1262.365656] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.375833] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.681837] env[69982]: DEBUG oslo_concurrency.lockutils [None req-808afaeb-fc76-4d20-81be-596f435e346f tempest-ServerActionsTestOtherA-1666177876 tempest-ServerActionsTestOtherA-1666177876-project-member] Lock "31f56d0e-7c64-4fe3-917e-7ebb814ae924" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.631s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.811155] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865625, 'name': ReconfigVM_Task, 'duration_secs': 0.228944} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.811449] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1262.812257] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd58b6f-feb1-4562-8f07-9c2cc46d90ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.835392] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1262.836055] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1695129e-a7ae-4063-bb6d-de306b53830c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.854995] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1262.854995] env[69982]: value = "task-3865629" [ 1262.854995] env[69982]: _type = "Task" [ 1262.854995] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.863145] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865629, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.875094] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865626, 'name': PowerOffVM_Task, 'duration_secs': 0.322067} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.875519] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.875717] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.876024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ae1b7e4-4746-465b-9498-db6d71fbbcad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.945311] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.945654] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.945907] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleting the datastore file [datastore1] 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.946304] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03509f66-f3d4-4b14-90f0-4e429cdb4933 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.957028] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1262.957028] env[69982]: value = "task-3865631" [ 1262.957028] env[69982]: _type = "Task" [ 1262.957028] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.967065] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865631, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.366023] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865629, 'name': ReconfigVM_Task, 'duration_secs': 0.271303} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.366296] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a/54b91e61-1302-40e6-b928-fcca31cd9b3a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1263.367145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91328ea-32fa-47ef-917a-bbf06c211305 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.385686] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f1b14e-d95d-4494-8d4d-dc3f32b94b67 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.404784] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb48d83-2646-404b-b944-1f40d52efe6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.425187] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c176ff74-dabe-486a-b2ba-63948aed0a12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.432911] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1263.433193] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0784486-15f6-48c5-b3d6-4695cc8c8f39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.440816] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1263.440816] env[69982]: value = "task-3865632" [ 1263.440816] env[69982]: _type = "Task" [ 1263.440816] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.449471] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865632, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1263.468253] env[69982]: DEBUG oslo_vmware.api [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865631, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166898} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1263.468526] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1263.468714] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1263.469398] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1263.469398] env[69982]: INFO nova.compute.manager [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1263.469398] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1263.469564] env[69982]: DEBUG nova.compute.manager [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1263.469662] env[69982]: DEBUG nova.network.neutron [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1263.863994] env[69982]: DEBUG nova.compute.manager [req-4a65f494-debc-4f43-a75b-f6ca1448659c req-327fc66e-03d1-4387-8be6-7b1f3fc8282c service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Received event network-vif-deleted-18ad43f6-284f-4ffe-8c0c-638aa5dc1be9 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1263.864292] env[69982]: INFO nova.compute.manager [req-4a65f494-debc-4f43-a75b-f6ca1448659c req-327fc66e-03d1-4387-8be6-7b1f3fc8282c service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Neutron deleted interface 18ad43f6-284f-4ffe-8c0c-638aa5dc1be9; detaching it from the instance and deleting it from the info cache [ 1263.864722] env[69982]: DEBUG nova.network.neutron [req-4a65f494-debc-4f43-a75b-f6ca1448659c req-327fc66e-03d1-4387-8be6-7b1f3fc8282c service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.952498] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865632, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.345964] env[69982]: DEBUG nova.network.neutron [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.366754] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33e5fa89-5d50-40c3-be99-253e75241417 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.377373] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f419f9d4-2e46-4bc6-9074-01af4d6ed408 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.408533] env[69982]: DEBUG nova.compute.manager [req-4a65f494-debc-4f43-a75b-f6ca1448659c req-327fc66e-03d1-4387-8be6-7b1f3fc8282c service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Detach interface failed, port_id=18ad43f6-284f-4ffe-8c0c-638aa5dc1be9, reason: Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1264.451514] env[69982]: DEBUG oslo_vmware.api [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865632, 'name': PowerOnVM_Task, 'duration_secs': 1.008212} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.451514] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1264.848687] env[69982]: INFO nova.compute.manager [-] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Took 1.38 seconds to deallocate network for instance. [ 1265.355140] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.355439] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.355680] env[69982]: DEBUG nova.objects.instance [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'resources' on Instance uuid 36b7f89e-7552-40b9-ada4-01abfcea8310 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1265.462143] env[69982]: INFO nova.compute.manager [None req-10d0c072-c272-4541-85b4-5400eea63f94 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance to original state: 'active' [ 1265.958790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8e2731-454d-479c-9e86-8a8a6fea0829 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.970145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf001ff-ff33-4566-a12d-e9cf65bcde3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.010356] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5139c3ad-dbf1-4e0a-af22-e6dfee6fb350 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.019233] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4fe585-9f83-4c41-8ae6-63072d90c1d5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.036750] env[69982]: DEBUG nova.compute.provider_tree [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.512197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.512559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.539746] env[69982]: DEBUG nova.scheduler.client.report [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1266.580282] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.580586] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.580874] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.581160] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.581403] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.583771] env[69982]: INFO nova.compute.manager [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Terminating instance [ 1266.746384] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.746557] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.014869] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1267.045370] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.690s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.065633] env[69982]: INFO nova.scheduler.client.report [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted allocations for instance 36b7f89e-7552-40b9-ada4-01abfcea8310 [ 1267.088717] env[69982]: DEBUG nova.compute.manager [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1267.089206] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1267.090853] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923fe025-1834-4964-bc26-eac482512e64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.100893] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1267.101206] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0255f4b-e44e-41b3-8b86-9ab55cf5875f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.108481] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1267.108481] env[69982]: value = "task-3865634" [ 1267.108481] env[69982]: _type = "Task" [ 1267.108481] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.119090] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865634, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.248852] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1267.533932] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.534300] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.535863] env[69982]: INFO nova.compute.claims [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1267.574324] env[69982]: DEBUG oslo_concurrency.lockutils [None req-871ce784-378f-4598-9459-49e87d1daad3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "36b7f89e-7552-40b9-ada4-01abfcea8310" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.733s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.575721] env[69982]: DEBUG oslo_concurrency.lockutils [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] Acquired lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1267.576638] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-866bf307-ddb1-4b04-8d0f-ccb281b6abd7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.588092] env[69982]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1267.588264] env[69982]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69982) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1267.588680] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76af9a1f-c34a-4704-9b2a-616b3163762e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.597723] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9e05e0-e586-44e5-a327-3ff9118ca90a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.617323] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865634, 'name': PowerOffVM_Task, 'duration_secs': 0.217652} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.617573] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1267.617756] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1267.618093] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a888624-37a7-4bc1-9b20-69afd51c078f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.628335] env[69982]: ERROR root [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-768094' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-768094' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-768094' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-768094'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, 
self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-768094' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-768094' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-768094'}\n"]: nova.exception.InstanceNotFound: Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 could not be found. [ 1267.628545] env[69982]: DEBUG oslo_concurrency.lockutils [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] Releasing lock "36b7f89e-7552-40b9-ada4-01abfcea8310" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1267.628747] env[69982]: DEBUG nova.compute.manager [req-e89b66c1-c93c-4b85-a2ad-5d7457e9de61 req-a6c7fda1-5f34-4e2f-8d94-510a29726bc5 service nova] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Detach interface failed, port_id=fc8b183b-39e0-493b-951b-cb7e8c614a94, reason: Instance 36b7f89e-7552-40b9-ada4-01abfcea8310 could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1267.687571] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1267.687819] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1267.688014] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleting the datastore file [datastore2] 54b91e61-1302-40e6-b928-fcca31cd9b3a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1267.688339] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a68093e1-a398-48c8-b3f2-61d01986018b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.695801] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1267.695801] env[69982]: value = "task-3865636" [ 1267.695801] env[69982]: _type = "Task" [ 1267.695801] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1267.706032] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865636, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.768931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.208791] env[69982]: DEBUG oslo_vmware.api [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865636, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161561} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.209122] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1268.209346] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1268.209553] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1268.209962] env[69982]: INFO nova.compute.manager [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1268.210061] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1268.210280] env[69982]: DEBUG nova.compute.manager [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1268.210401] env[69982]: DEBUG nova.network.neutron [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1268.236461] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1268.655112] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c035f67-9c1f-425b-8f89-699ca570b4b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.662410] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43143f7b-fd8f-4a80-90af-2113decbda8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.700099] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a296ec4-f15d-41e9-a0b8-bcb25b286358 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.710082] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb6978d-156f-4947-ad62-c7f1974c9a4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.727515] env[69982]: DEBUG nova.compute.provider_tree [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1268.954051] env[69982]: DEBUG nova.compute.manager [req-bcf8ca7e-0bdc-4f91-a3db-edf4bc5eaf8b req-2771db82-b1e7-491a-881f-8d50ef3982f5 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Received event network-vif-deleted-98b299b9-3425-43e1-95bf-4acd909b7ad4 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1268.954295] env[69982]: INFO nova.compute.manager [req-bcf8ca7e-0bdc-4f91-a3db-edf4bc5eaf8b req-2771db82-b1e7-491a-881f-8d50ef3982f5 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Neutron deleted interface 98b299b9-3425-43e1-95bf-4acd909b7ad4; detaching it from the instance and deleting it from the info cache [ 1268.954465] env[69982]: DEBUG nova.network.neutron [req-bcf8ca7e-0bdc-4f91-a3db-edf4bc5eaf8b req-2771db82-b1e7-491a-881f-8d50ef3982f5 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.232411] env[69982]: DEBUG nova.scheduler.client.report [None 
req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1269.440265] env[69982]: DEBUG nova.network.neutron [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1269.456834] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bec2b2f-ce93-4a23-8d43-551d3a86d445 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.467957] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1270f3-9a50-4ad8-972a-afb2e5104bee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.497971] env[69982]: DEBUG nova.compute.manager [req-bcf8ca7e-0bdc-4f91-a3db-edf4bc5eaf8b req-2771db82-b1e7-491a-881f-8d50ef3982f5 service nova] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Detach interface failed, port_id=98b299b9-3425-43e1-95bf-4acd909b7ad4, reason: Instance 54b91e61-1302-40e6-b928-fcca31cd9b3a could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1269.740929] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.206s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.741524] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1269.744131] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.975s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.745581] env[69982]: INFO nova.compute.claims [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1269.945765] env[69982]: INFO nova.compute.manager [-] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Took 1.74 seconds to deallocate network for instance. [ 1270.043066] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.043340] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.249930] env[69982]: DEBUG nova.compute.utils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1270.251438] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Not allocating networking since 'none' was specified. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1270.255039] env[69982]: DEBUG oslo_concurrency.lockutils [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.255268] env[69982]: DEBUG oslo_concurrency.lockutils [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.454079] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1270.546427] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1270.753397] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1270.759842] env[69982]: INFO nova.compute.manager [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Detaching volume 6f459808-fc05-4a22-b8c6-83bd72fd1939 [ 1270.797128] env[69982]: INFO nova.virt.block_device [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Attempting to driver detach volume 6f459808-fc05-4a22-b8c6-83bd72fd1939 from mountpoint /dev/sdb [ 1270.797375] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1270.797572] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768107', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'name': 'volume-6f459808-fc05-4a22-b8c6-83bd72fd1939', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ce24e165-230a-44bb-ae46-d1479e71585a', 'attached_at': '', 'detached_at': '', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'serial': '6f459808-fc05-4a22-b8c6-83bd72fd1939'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1270.798728] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb2355f-aaf3-4351-a4ce-43cf7f1fc8f8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.827510] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149c59dc-efef-4627-8ba8-692e75d7d289 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.834889] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1527d62-a847-451a-b3b0-924362462c55 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.856844] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe53208-f3c6-4db4-bad8-77b346d0d790 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.871678] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] The volume has not been displaced from its original location: [datastore1] volume-6f459808-fc05-4a22-b8c6-83bd72fd1939/volume-6f459808-fc05-4a22-b8c6-83bd72fd1939.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1270.877015] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1270.880067] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b99be96-a71e-4fcc-9934-bb517b263a00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.898923] env[69982]: DEBUG oslo_vmware.api [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1270.898923] env[69982]: value = "task-3865637" [ 1270.898923] env[69982]: _type = "Task" [ 1270.898923] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.910972] env[69982]: DEBUG oslo_vmware.api [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865637, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.936495] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46d043d0-533f-4e60-b835-385475aacc37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.945824] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a48bb4-0c1d-479d-9330-bd4095531b6f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.978638] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b019c6fe-58cc-4b33-855a-c2b03156883d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.987130] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf53832-aff4-42ef-b8de-7c379dde3503 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.001476] env[69982]: DEBUG nova.compute.provider_tree [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1271.067070] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1271.409713] env[69982]: DEBUG oslo_vmware.api [None 
req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865637, 'name': ReconfigVM_Task, 'duration_secs': 0.21698} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.409999] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1271.414613] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cf87e51-bcdf-4969-b1e8-acca186aafb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.430185] env[69982]: DEBUG oslo_vmware.api [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1271.430185] env[69982]: value = "task-3865638" [ 1271.430185] env[69982]: _type = "Task" [ 1271.430185] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.438746] env[69982]: DEBUG oslo_vmware.api [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.505510] env[69982]: DEBUG nova.scheduler.client.report [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1271.766842] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1271.793831] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1271.794112] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1271.794270] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1271.794455] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1271.794594] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1271.794808] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1271.795051] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1271.795217] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1271.795384] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce 
tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1271.795549] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1271.795833] env[69982]: DEBUG nova.virt.hardware [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1271.796792] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebba5a6-7e2a-458f-95ca-088c8aeaa0a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.805562] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828f0b42-3cc2-4b5e-9175-171b3bcd0a09 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.819769] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1271.825488] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Creating folder: Project (83f46e3824734c7f9c19aba3f32ca71b). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1271.825812] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-53b1c18c-9d71-4932-a3f9-713187a9cf70 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.835621] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Created folder: Project (83f46e3824734c7f9c19aba3f32ca71b) in parent group-v767796. [ 1271.835826] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Creating folder: Instances. Parent ref: group-v768115. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1271.836107] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a79662b-7072-473f-99f2-ac90b5e7b8d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.845417] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Created folder: Instances in parent group-v768115. 
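The records above show the pattern behind most of the oslo.vmware traffic in this log: a SOAP method such as Folder.CreateVM_Task is invoked, a task reference comes back, and the session then polls that task until vCenter reports completion (the recurring "Waiting for the task" / "progress is N%" lines). The sketch below is only an illustration of that invoke-then-poll flow against oslo.vmware's public session API; it is not Nova's vm_util code, and the function name and the folder/spec/pool arguments are hypothetical placeholders.

```python
# Illustrative sketch only -- not Nova's vm_util implementation. Assumes an
# already-established oslo_vmware.api.VMwareAPISession (`session`), plus
# hypothetical folder_ref / config_spec / respool_ref managed-object arguments.
def create_vm_and_wait(session, folder_ref, config_spec, respool_ref):
    """Invoke CreateVM_Task on vCenter and block until it finishes."""
    # invoke_api() issues the SOAP request (the "Invoking Folder.CreateVM_Task
    # with opID=..." records) and immediately returns a task reference.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=respool_ref)
    # wait_for_task() polls the task's TaskInfo until it reaches a terminal
    # state -- the source of the periodic "progress is N%" records above. A
    # task that ends in the "error" state is raised as an oslo_vmware
    # exception instead of being returned.
    task_info = session.wait_for_task(task_ref)
    # On success, TaskInfo.result carries the new VM's managed object reference.
    return task_info.result
```

The same invoke-then-poll shape applies to the ReconfigVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task and PowerOffVM_Task operations logged elsewhere in this run; only the method name and arguments change.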
[ 1271.845679] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1271.845875] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1271.846110] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abf4c98c-298d-4297-9e88-bbc9e3f59ff6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.862674] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1271.862674] env[69982]: value = "task-3865641" [ 1271.862674] env[69982]: _type = "Task" [ 1271.862674] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.870268] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865641, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.940958] env[69982]: DEBUG oslo_vmware.api [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865638, 'name': ReconfigVM_Task, 'duration_secs': 0.146391} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.941276] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768107', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'name': 'volume-6f459808-fc05-4a22-b8c6-83bd72fd1939', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ce24e165-230a-44bb-ae46-d1479e71585a', 'attached_at': '', 'detached_at': '', 'volume_id': '6f459808-fc05-4a22-b8c6-83bd72fd1939', 'serial': '6f459808-fc05-4a22-b8c6-83bd72fd1939'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1272.011396] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.267s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1272.012113] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1272.015168] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.779s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1272.015455] env[69982]: DEBUG nova.objects.instance [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'pci_requests' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.373233] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865641, 'name': CreateVM_Task, 'duration_secs': 0.251026} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.373432] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1272.373855] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.374027] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.374364] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1272.374619] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fda0c0c-1ec5-46de-8be7-4d8b43194722 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.379062] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1272.379062] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232cddf-1e59-e45a-a8d1-d0ce88bdd98f" [ 1272.379062] env[69982]: _type = "Task" [ 1272.379062] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.386574] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232cddf-1e59-e45a-a8d1-d0ce88bdd98f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.484594] env[69982]: DEBUG nova.objects.instance [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.518414] env[69982]: DEBUG nova.compute.utils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1272.521282] env[69982]: DEBUG nova.objects.instance [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'numa_topology' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1272.522395] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Not allocating networking since 'none' was specified. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1272.890075] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5232cddf-1e59-e45a-a8d1-d0ce88bdd98f, 'name': SearchDatastore_Task, 'duration_secs': 0.009897} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.890427] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.890674] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1272.890959] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.891158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.891404] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1272.891679] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-61223f78-bedc-4c34-ad1d-0d49a9597e80 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.900266] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1272.900460] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1272.901191] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a163277-bd75-4eb6-98d3-8d031aa40c03 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.906790] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1272.906790] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab3656-449f-1372-8017-83bd9bb275e3" [ 1272.906790] env[69982]: _type = "Task" [ 1272.906790] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.915330] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab3656-449f-1372-8017-83bd9bb275e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.023245] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1273.026328] env[69982]: INFO nova.compute.claims [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1273.417930] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ab3656-449f-1372-8017-83bd9bb275e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009191} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.418711] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66bbe483-5fae-41cd-bb39-23391b1ec65d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.423807] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1273.423807] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524de1b1-15fa-d137-db0e-6cb22421f3d9" [ 1273.423807] env[69982]: _type = "Task" [ 1273.423807] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.431598] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524de1b1-15fa-d137-db0e-6cb22421f3d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.439096] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.492422] env[69982]: DEBUG oslo_concurrency.lockutils [None req-244a4a3a-73a2-4ad9-8632-ed08165a1581 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.237s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.493681] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.055s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.493887] env[69982]: DEBUG nova.compute.manager [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.495013] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614d0dad-16ab-4fbc-99d8-de3eb8533348 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.501617] env[69982]: DEBUG nova.compute.manager [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1273.502228] env[69982]: DEBUG nova.objects.instance [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.935577] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524de1b1-15fa-d137-db0e-6cb22421f3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009776} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1273.936021] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1273.936245] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ffb343e2-b631-4ff8-9da4-e07462d126c7/ffb343e2-b631-4ff8-9da4-e07462d126c7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1273.936561] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f5fd784-7096-484f-9202-d1e7eb950b5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.943263] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1273.943263] env[69982]: value = "task-3865642" [ 1273.943263] env[69982]: _type = "Task" [ 1273.943263] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1273.951581] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.035634] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1274.066046] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1274.066323] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1274.066479] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1274.066658] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1274.066803] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1274.066949] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1274.067209] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1274.067410] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1274.067593] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 
tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1274.067762] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1274.067946] env[69982]: DEBUG nova.virt.hardware [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1274.068831] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90547693-19e9-4dc5-85d1-8ca09b4427d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.079104] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b1151e-277d-4f0f-9b56-b24fe2aec0cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.094656] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1274.100520] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1274.103528] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1274.103988] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7deb5f98-40e4-4ae8-929c-79abcb322e4c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.126330] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1274.126330] env[69982]: value = "task-3865643" [ 1274.126330] env[69982]: _type = "Task" [ 1274.126330] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.138271] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865643, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.185935] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a9d08e-e773-4eca-ac44-29d031fafd85 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.196508] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7c176e-2c29-4c74-b8de-16645f383ffa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.229409] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765e46f1-747d-4532-a928-5f1069acfa3f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.239273] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a83c654-57e0-435c-af1d-c305e6b2adb9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.256871] env[69982]: DEBUG nova.compute.provider_tree [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1274.454586] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482529} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.454850] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] ffb343e2-b631-4ff8-9da4-e07462d126c7/ffb343e2-b631-4ff8-9da4-e07462d126c7.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1274.455093] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1274.455358] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-11082b39-ae8d-46a9-a937-1dd02eb11d88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.462115] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1274.462115] env[69982]: value = "task-3865644" [ 1274.462115] env[69982]: _type = "Task" [ 1274.462115] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.471234] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.510298] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.510630] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c44b93b-35d1-4dc4-90d7-17f9a556d618 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.518800] env[69982]: DEBUG oslo_vmware.api [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1274.518800] env[69982]: value = "task-3865645" [ 1274.518800] env[69982]: _type = "Task" [ 1274.518800] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.529249] env[69982]: DEBUG oslo_vmware.api [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.636232] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865643, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.761047] env[69982]: DEBUG nova.scheduler.client.report [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.972995] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071933} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.973307] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1274.974107] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e11a1ae-fb9f-4082-b3ae-ff62dc05b499 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.994300] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] ffb343e2-b631-4ff8-9da4-e07462d126c7/ffb343e2-b631-4ff8-9da4-e07462d126c7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1274.994981] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fceb8002-9b90-4eb7-99c4-bc05b17faa3e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.015386] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1275.015386] env[69982]: value = "task-3865646" [ 1275.015386] env[69982]: _type = "Task" [ 1275.015386] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.025120] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.030543] env[69982]: DEBUG oslo_vmware.api [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.136863] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865643, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.265984] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.251s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.268350] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.814s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.268571] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.270393] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.203s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.272021] env[69982]: INFO nova.compute.claims [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1275.294478] env[69982]: INFO nova.scheduler.client.report [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted allocations for instance 54b91e61-1302-40e6-b928-fcca31cd9b3a [ 1275.306662] env[69982]: INFO nova.network.neutron [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating port f973e93a-f258-4ccd-a732-c323a3202bb3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1275.532203] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865646, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.536173] env[69982]: DEBUG oslo_vmware.api [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865645, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.637680] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865643, 'name': CreateVM_Task, 'duration_secs': 1.451361} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.637862] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1275.638329] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.638762] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.638843] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1275.639099] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-502be26f-5897-48ad-ae29-cc7f4bf08aa2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.644431] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1275.644431] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521bd599-b98e-a82d-f90c-c8ebbfd55da5" [ 1275.644431] env[69982]: _type = "Task" [ 1275.644431] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.652972] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521bd599-b98e-a82d-f90c-c8ebbfd55da5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.801668] env[69982]: DEBUG oslo_concurrency.lockutils [None req-60b5fd80-cce0-4ac1-8e4e-51241ba152e8 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "54b91e61-1302-40e6-b928-fcca31cd9b3a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.221s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.027524] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865646, 'name': ReconfigVM_Task, 'duration_secs': 0.785177} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.028201] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Reconfigured VM instance instance-00000077 to attach disk [datastore2] ffb343e2-b631-4ff8-9da4-e07462d126c7/ffb343e2-b631-4ff8-9da4-e07462d126c7.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1276.028840] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33740b8b-e751-4472-a99e-9eb83d6d6384 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.034975] env[69982]: DEBUG oslo_vmware.api [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865645, 'name': PowerOffVM_Task, 'duration_secs': 1.254064} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.034975] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.034975] env[69982]: DEBUG nova.compute.manager [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1276.034975] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffe4f85-ff50-45cd-a8bd-436575c855cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.038673] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1276.038673] env[69982]: value = "task-3865647" [ 1276.038673] env[69982]: _type = "Task" [ 1276.038673] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.050268] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865647, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.155970] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]521bd599-b98e-a82d-f90c-c8ebbfd55da5, 'name': SearchDatastore_Task, 'duration_secs': 0.012487} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.156311] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.156599] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1276.156876] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.157121] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.157239] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1276.157511] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79256488-7544-4354-b923-2bc2a781a2f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.167329] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1276.167537] env[69982]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1276.168317] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ae9c947-9db8-4f3b-a6f9-04137d6cba65 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.174834] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1276.174834] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a647fb-e407-e90c-e58f-b9821f615683" [ 1276.174834] env[69982]: _type = "Task" [ 1276.174834] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.182805] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a647fb-e407-e90c-e58f-b9821f615683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.386650] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125f1923-f28b-48ed-a895-6f59e3bf8431 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.394919] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb62b02d-5488-4f35-ad64-39700d8c7700 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.427441] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b8a82f-5277-484b-8dfd-077368bdb97a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.435613] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785cf47c-c64d-4920-ae06-d7c8403d082f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.449796] env[69982]: DEBUG nova.compute.provider_tree [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.552239] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865647, 'name': Rename_Task, 'duration_secs': 0.133639} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.552513] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1276.552765] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a943ce34-d806-45f1-8879-f812f02104ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.554945] env[69982]: DEBUG oslo_concurrency.lockutils [None req-17b8ce92-d4fd-4ed7-b34b-65bbd83e3252 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.061s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.561658] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1276.561658] env[69982]: value = "task-3865648" [ 1276.561658] env[69982]: _type = "Task" [ 1276.561658] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.571292] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.686416] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52a647fb-e407-e90c-e58f-b9821f615683, 'name': SearchDatastore_Task, 'duration_secs': 0.00919} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.687295] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cc07184-42cb-42a0-bb85-708db3872bdf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.692971] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1276.692971] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526fe2a9-68a1-c4c4-ee8c-2768527f47a8" [ 1276.692971] env[69982]: _type = "Task" [ 1276.692971] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.701946] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526fe2a9-68a1-c4c4-ee8c-2768527f47a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.760888] env[69982]: DEBUG nova.compute.manager [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1276.761121] env[69982]: DEBUG oslo_concurrency.lockutils [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.761334] env[69982]: DEBUG oslo_concurrency.lockutils [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1276.761501] env[69982]: DEBUG oslo_concurrency.lockutils [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1276.761702] env[69982]: DEBUG nova.compute.manager [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] No waiting events found dispatching network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1276.761817] env[69982]: WARNING nova.compute.manager [req-c29585d5-0df0-4558-8707-6e32ccfbb607 req-4842cca7-6508-4290-87c0-c1ad4781ea7d service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received unexpected event network-vif-plugged-f973e93a-f258-4ccd-a732-c323a3202bb3 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1276.848494] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.848699] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.848862] env[69982]: DEBUG nova.network.neutron [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1276.953193] env[69982]: DEBUG nova.scheduler.client.report [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1276.977850] env[69982]: DEBUG nova.objects.instance [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.070108] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.070562] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.076852] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865648, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.204238] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526fe2a9-68a1-c4c4-ee8c-2768527f47a8, 'name': SearchDatastore_Task, 'duration_secs': 0.00975} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.204543] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.205013] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1277.205814] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b7d9458-ca21-4fac-ae3d-e2824e69ad38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.213747] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1277.213747] env[69982]: value = "task-3865649" [ 1277.213747] env[69982]: _type = "Task" [ 1277.213747] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.222930] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.459609] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.189s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.460169] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Start building networks asynchronously for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1277.488204] env[69982]: DEBUG oslo_concurrency.lockutils [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.488438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.488631] env[69982]: DEBUG nova.network.neutron [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1277.488831] env[69982]: DEBUG nova.objects.instance [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'info_cache' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.577438] env[69982]: DEBUG oslo_vmware.api [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865648, 'name': PowerOnVM_Task, 'duration_secs': 0.572115} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.577438] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1277.577438] env[69982]: INFO nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Took 5.81 seconds to spawn the instance on the hypervisor. [ 1277.577438] env[69982]: DEBUG nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1277.577438] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88722556-33c5-49ba-ad41-ce3dd13f408d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.579828] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1277.641051] env[69982]: DEBUG nova.network.neutron [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.725172] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49549} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.725452] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1277.725670] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1277.725973] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29c158a0-8764-489f-a3a1-49cd9c69b6c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.732848] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1277.732848] env[69982]: value = "task-3865650" [ 1277.732848] env[69982]: _type = "Task" [ 1277.732848] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.741770] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865650, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.966404] env[69982]: DEBUG nova.compute.utils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1277.969090] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1277.969090] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1277.993543] env[69982]: DEBUG nova.objects.base [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1278.009985] env[69982]: DEBUG nova.policy [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1278.106599] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.106872] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.108519] env[69982]: INFO nova.compute.claims [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1278.111705] env[69982]: INFO nova.compute.manager [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Took 10.59 seconds to build instance. 
[ 1278.144559] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.172940] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1241093ddd1f9fdf9838c05a874b3cc2',container_format='bare',created_at=2025-05-07T07:15:06Z,direct_url=,disk_format='vmdk',id=f2a59d2b-6bba-4229-910d-72dc8d0f9184,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1555048463-shelved',owner='7372e00e1966430da6131e02f199ba14',properties=ImageMetaProps,protected=,size=31662080,status='active',tags=,updated_at=2025-05-07T07:15:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1278.173226] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1278.173381] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1278.173558] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1278.173704] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1278.173846] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1278.174069] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1278.174231] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1278.174398] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1278.174557] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1278.174724] env[69982]: DEBUG nova.virt.hardware [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1278.175895] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2db0808-ccf7-4097-a155-90792efaa25b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.185652] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e57b4d-53c1-4206-9404-86eb7c99922d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.200104] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:5c:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f973e93a-f258-4ccd-a732-c323a3202bb3', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1278.207673] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1278.207987] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1278.208259] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0193b0d3-c0b1-4e84-81fd-d9ae767a721a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.228283] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1278.228283] env[69982]: value = "task-3865651" [ 1278.228283] env[69982]: _type = "Task" [ 1278.228283] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.236743] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865651, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.244680] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066092} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.244968] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1278.245769] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f112d7-8601-4dfb-919f-1a84195560d4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.266687] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1278.266913] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0543cd32-d81c-4b22-ac1b-0476edac3e40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.286859] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1278.286859] env[69982]: value = "task-3865652" [ 1278.286859] env[69982]: _type = "Task" [ 1278.286859] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.295682] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865652, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.302309] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Successfully created port: 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1278.471251] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1278.618564] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9be4e36f-847e-4cc1-8beb-a99adee5f8ce tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.106s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.734398] env[69982]: DEBUG nova.network.neutron [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [{"id": "56cebe1d-243c-4f51-b0a0-200e18141707", "address": "fa:16:3e:49:24:7a", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56cebe1d-24", "ovs_interfaceid": "56cebe1d-243c-4f51-b0a0-200e18141707", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.741255] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865651, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.797577] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865652, 'name': ReconfigVM_Task, 'duration_secs': 0.313115} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.797872] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1278.798584] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38624307-e0fd-43cd-9df1-6d4f6a91efd9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.805811] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1278.805811] env[69982]: value = "task-3865653" [ 1278.805811] env[69982]: _type = "Task" [ 1278.805811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.810866] env[69982]: DEBUG nova.compute.manager [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1278.811069] env[69982]: DEBUG nova.compute.manager [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing instance network info cache due to event network-changed-f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1278.811274] env[69982]: DEBUG oslo_concurrency.lockutils [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] Acquiring lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.811416] env[69982]: DEBUG oslo_concurrency.lockutils [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] Acquired lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1278.811574] env[69982]: DEBUG nova.network.neutron [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Refreshing network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1278.818406] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865653, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.239855] env[69982]: DEBUG oslo_concurrency.lockutils [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "refresh_cache-ce24e165-230a-44bb-ae46-d1479e71585a" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.241383] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865651, 'name': CreateVM_Task, 'duration_secs': 0.865287} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.242717] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1279.243646] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42adb559-23ae-48a0-95a6-0296e1bea916 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.247025] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.247223] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.247632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1279.247932] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f9882a-0e58-45fb-8ed2-710c2e27d31d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.254672] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1279.254672] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52bc919a-d69a-d508-5360-5cb0246d1a11" [ 1279.254672] env[69982]: _type = "Task" [ 1279.254672] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.261246] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8939ede2-655a-4c6f-a15c-1762a70a69ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.294406] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1279.294695] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Processing image f2a59d2b-6bba-4229-910d-72dc8d0f9184 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1279.294972] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1279.295158] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1279.295469] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1279.296290] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da584524-05a7-4dec-a6b7-8e23d4c989a1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.298777] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cadfefc-e3da-4068-8d42-89cb5e965368 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.309952] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456a8564-a24d-4eb0-8467-af84f3859914 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.325477] env[69982]: DEBUG nova.compute.provider_tree [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 
206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1279.330539] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1279.330697] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1279.331447] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865653, 'name': Rename_Task, 'duration_secs': 0.163743} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.331894] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f18c3ca4-d2ea-4145-9693-396465cf1939 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.334287] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1279.335130] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d18e3445-e30b-452a-8d48-4dc046c3dd58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.340653] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1279.340653] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527bb0f3-7062-4b0d-d85e-bb8471ce1c8a" [ 1279.340653] env[69982]: _type = "Task" [ 1279.340653] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.348945] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1279.348945] env[69982]: value = "task-3865654" [ 1279.348945] env[69982]: _type = "Task" [ 1279.348945] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.358267] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]527bb0f3-7062-4b0d-d85e-bb8471ce1c8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.365147] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865654, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.483909] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1279.513987] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1279.514385] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1279.514573] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1279.514769] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1279.514985] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1279.515235] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1279.515525] 
env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1279.515742] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1279.515963] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1279.516172] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1279.516458] env[69982]: DEBUG nova.virt.hardware [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1279.517549] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ccf4344-90b4-4be1-9e06-083598932114 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.532244] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b63123-298d-4da3-80f1-2f3ed1758c34 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.559968] env[69982]: DEBUG nova.network.neutron [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updated VIF entry in instance network info cache for port f973e93a-f258-4ccd-a732-c323a3202bb3. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1279.560474] env[69982]: DEBUG nova.network.neutron [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [{"id": "f973e93a-f258-4ccd-a732-c323a3202bb3", "address": "fa:16:3e:73:5c:dd", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf973e93a-f2", "ovs_interfaceid": "f973e93a-f258-4ccd-a732-c323a3202bb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1279.786666] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Successfully updated port: 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1279.833309] env[69982]: DEBUG nova.scheduler.client.report [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1279.857407] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1279.857674] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Fetch image to [datastore2] 
OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35/OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1279.857853] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Downloading stream optimized image f2a59d2b-6bba-4229-910d-72dc8d0f9184 to [datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35/OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35.vmdk on the data store datastore2 as vApp {{(pid=69982) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1279.858033] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Downloading image file data f2a59d2b-6bba-4229-910d-72dc8d0f9184 to the ESX as VM named 'OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35' {{(pid=69982) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1279.866693] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865654, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.950420] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1279.950420] env[69982]: value = "resgroup-9" [ 1279.950420] env[69982]: _type = "ResourcePool" [ 1279.950420] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1279.950852] env[69982]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-fa45a3bc-7c2a-43da-97d9-1a9ac2ebe49a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.974968] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease: (returnval){ [ 1279.974968] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0d2bc-6051-f385-02d6-221f17bc1b08" [ 1279.974968] env[69982]: _type = "HttpNfcLease" [ 1279.974968] env[69982]: } obtained for vApp import into resource pool (val){ [ 1279.974968] env[69982]: value = "resgroup-9" [ 1279.974968] env[69982]: _type = "ResourcePool" [ 1279.974968] env[69982]: }. 
{{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1279.975370] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the lease: (returnval){ [ 1279.975370] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0d2bc-6051-f385-02d6-221f17bc1b08" [ 1279.975370] env[69982]: _type = "HttpNfcLease" [ 1279.975370] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1279.982927] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1279.982927] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0d2bc-6051-f385-02d6-221f17bc1b08" [ 1279.982927] env[69982]: _type = "HttpNfcLease" [ 1279.982927] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1280.064448] env[69982]: DEBUG oslo_concurrency.lockutils [req-b90ccebb-cfb7-44b9-b48a-c6dc56d16b70 req-edc81a0e-579e-4863-a7b9-1a899c6f68ef service nova] Releasing lock "refresh_cache-7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.247370] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1280.247758] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8c34c67-c96d-4ecf-8840-ecdde4f7c1d1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.256814] env[69982]: DEBUG oslo_vmware.api [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1280.256814] env[69982]: value = "task-3865656" [ 1280.256814] env[69982]: _type = "Task" [ 1280.256814] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.266382] env[69982]: DEBUG oslo_vmware.api [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865656, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.289309] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.289503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.289682] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1280.339037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.232s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.339667] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1280.365246] env[69982]: DEBUG oslo_vmware.api [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865654, 'name': PowerOnVM_Task, 'duration_secs': 0.635652} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.365528] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1280.365734] env[69982]: INFO nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Took 6.33 seconds to spawn the instance on the hypervisor. 
[ 1280.365908] env[69982]: DEBUG nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1280.366797] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504fcf21-653c-47e1-b1c1-a80d58704c84 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.486824] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1280.486824] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0d2bc-6051-f385-02d6-221f17bc1b08" [ 1280.486824] env[69982]: _type = "HttpNfcLease" [ 1280.486824] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1280.487180] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1280.487180] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f0d2bc-6051-f385-02d6-221f17bc1b08" [ 1280.487180] env[69982]: _type = "HttpNfcLease" [ 1280.487180] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1280.487944] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cd8a68-935f-47dc-bc7c-2cb801ceebc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.496367] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1280.496580] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HTTP connection to write to file with size = 31662080 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk. {{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1280.559041] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a2cfd6ff-718f-4ed4-85b0-df7f53d8a946 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.771517] env[69982]: DEBUG oslo_vmware.api [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865656, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.828298] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1280.839781] env[69982]: DEBUG nova.compute.manager [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-vif-plugged-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1280.840058] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.840214] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.840382] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.840613] env[69982]: DEBUG nova.compute.manager [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] No waiting events found dispatching network-vif-plugged-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1280.840772] env[69982]: WARNING nova.compute.manager [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received unexpected event network-vif-plugged-90606ccc-f773-4313-9003-0b3239a7ca18 for instance with vm_state building and task_state spawning. [ 1280.840876] env[69982]: DEBUG nova.compute.manager [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1280.841042] env[69982]: DEBUG nova.compute.manager [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1280.841210] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.844260] env[69982]: DEBUG nova.compute.utils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1280.845679] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1280.845924] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1280.886772] env[69982]: INFO nova.compute.manager [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Took 13.13 seconds to build instance. [ 1280.893831] env[69982]: DEBUG nova.policy [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08d17e3ac5f40c6890dc8dcc4c559d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'efe64e69253d49a6a1146f240506ce39', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1281.011449] env[69982]: DEBUG nova.network.neutron [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.271257] env[69982]: DEBUG oslo_vmware.api [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865656, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.272345] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Successfully created port: dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1281.349912] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1281.391567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4e5e5cdd-b25f-4453-92c7-3086a7b87462 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.644s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.515065] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1281.515224] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Instance network_info: |[{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1281.515516] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1281.515719] env[69982]: DEBUG nova.network.neutron [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1281.516960] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:cb:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90606ccc-f773-4313-9003-0b3239a7ca18', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1281.525370] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1281.529867] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1281.532997] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5c82474-981f-438f-beca-083f874eac9f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.566244] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1281.566244] env[69982]: value = "task-3865657" [ 1281.566244] env[69982]: _type = "Task" [ 1281.566244] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.577907] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865657, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.687275] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Completed reading data from the image iterator. 
{{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1281.687529] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1281.688535] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c198fc8b-df22-4e50-9a7a-00e11fe5deba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.697214] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1281.697457] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1281.697789] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-427119d0-d8a1-43c4-ad1d-2e87d7a14d7e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.772916] env[69982]: DEBUG oslo_vmware.api [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865656, 'name': PowerOnVM_Task, 'duration_secs': 1.338531} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.773216] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1281.773416] env[69982]: DEBUG nova.compute.manager [None req-28807e2f-a3a0-4945-b0c0-0f525b1822ec tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1281.774229] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650b39ca-a9c5-4c4f-a5d8-4a3cbcf543df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.871998] env[69982]: INFO nova.compute.manager [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Rebuilding instance [ 1281.911575] env[69982]: DEBUG oslo_vmware.rw_handles [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5249e472-8416-ac87-6435-03211d25ed8a/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1281.911819] env[69982]: INFO nova.virt.vmwareapi.images [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Downloaded image file data f2a59d2b-6bba-4229-910d-72dc8d0f9184 [ 1281.912692] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e639dc00-4129-487c-a0a7-21dcff608d11 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.921754] env[69982]: DEBUG nova.compute.manager [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1281.922946] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d50c81e-11d4-4ad8-9a19-c4fd2e9b0860 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.939985] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ff95331-0439-43d4-bd59-a1ab6a3ca717 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.972922] env[69982]: INFO nova.virt.vmwareapi.images [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] The imported VM was unregistered [ 1281.975630] 
env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1281.975939] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1281.976519] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b350f9a6-bf69-45a7-82b9-fe206e4624f4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.988997] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1281.989256] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35/OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35.vmdk to [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk. {{(pid=69982) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1281.989469] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-13040940-f79b-49f5-a206-107d1e835a1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.998853] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1281.998853] env[69982]: value = "task-3865659" [ 1281.998853] env[69982]: _type = "Task" [ 1281.998853] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.008244] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.077295] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865657, 'name': CreateVM_Task, 'duration_secs': 0.458211} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.077478] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1282.078200] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.078371] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.078708] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1282.078972] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebf5c54a-7a7a-4178-8dbd-d6e03cf7d976 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.084840] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1282.084840] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526016f5-ab00-e4ac-789c-9de3f281915f" [ 1282.084840] env[69982]: _type = "Task" [ 1282.084840] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.093424] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526016f5-ab00-e4ac-789c-9de3f281915f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.283621] env[69982]: DEBUG nova.network.neutron [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1282.284017] env[69982]: DEBUG nova.network.neutron [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1282.362462] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1282.393099] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1282.393380] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1282.393533] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1282.393713] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1282.393858] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1282.394032] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1282.394331] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1282.394503] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1282.394764] env[69982]: DEBUG nova.virt.hardware [None 
req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1282.395068] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1282.395225] env[69982]: DEBUG nova.virt.hardware [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1282.396537] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841918a0-a89f-4e26-9578-d645867a2bee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.406544] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d68d83-de8c-4c7b-9bba-3878e3f0d68e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.510452] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.595557] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]526016f5-ab00-e4ac-789c-9de3f281915f, 'name': SearchDatastore_Task, 'duration_secs': 0.018684} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.595948] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.596178] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1282.596428] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1282.596579] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1282.596759] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1282.597049] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-675fb88e-44e7-49a0-a6b3-e6d49baff414 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.615138] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1282.615353] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1282.616190] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f648b56-3f8d-44ed-92c1-535316185a6b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.622832] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1282.622832] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667c16-c038-f09b-fdce-989aab4c573e" [ 1282.622832] env[69982]: _type = "Task" [ 1282.622832] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.631694] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667c16-c038-f09b-fdce-989aab4c573e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.706286] env[69982]: DEBUG nova.compute.manager [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Received event network-vif-plugged-dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1282.706534] env[69982]: DEBUG oslo_concurrency.lockutils [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] Acquiring lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.706751] env[69982]: DEBUG oslo_concurrency.lockutils [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.706929] env[69982]: DEBUG oslo_concurrency.lockutils [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.707126] env[69982]: DEBUG nova.compute.manager [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] No waiting events found dispatching network-vif-plugged-dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1282.707301] env[69982]: WARNING nova.compute.manager [req-9754dbeb-ee4c-4e44-b3c1-b7d2ccf1e207 req-20afe1d0-c9d9-46d5-9be7-31e03bedf986 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Received unexpected event 
network-vif-plugged-dc946213-769e-4998-bc51-5344491a5d10 for instance with vm_state building and task_state spawning. [ 1282.788402] env[69982]: DEBUG oslo_concurrency.lockutils [req-19101ff1-ee77-4c81-a746-b087909d81f1 req-ee407370-692b-4e16-ac96-be1574c6c903 service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.798233] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Successfully updated port: dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1282.951896] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1282.952243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-295f64d8-b72e-4e27-bafd-acfee5da06dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.961145] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1282.961145] env[69982]: value = "task-3865660" [ 1282.961145] env[69982]: _type = "Task" [ 1282.961145] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.972324] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865660, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.011407] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.136101] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52667c16-c038-f09b-fdce-989aab4c573e, 'name': SearchDatastore_Task, 'duration_secs': 0.091645} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.138049] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc8b9736-a69f-442f-8919-0dcb99afcd9d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.143993] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1283.143993] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520854a1-0c91-75b4-2002-c23cfde0910d" [ 1283.143993] env[69982]: _type = "Task" [ 1283.143993] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.153734] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520854a1-0c91-75b4-2002-c23cfde0910d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.304672] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.304959] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.305164] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1283.472977] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865660, 'name': PowerOffVM_Task, 'duration_secs': 0.250343} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.473303] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1283.473540] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1283.474397] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e83166-9111-4930-adc4-3c43447d010e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.482825] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1283.483147] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e464c303-760b-43c1-8dd5-226fcb2f7209 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.510943] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1283.512035] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1283.512035] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleting the datastore file [datastore2] 8a2c1c8c-a2a8-482b-9004-41971ed2b493 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1283.512035] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2be5b066-8011-4046-833d-34737f6089ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.517680] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 57%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.523045] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1283.523045] env[69982]: value = "task-3865662" [ 1283.523045] env[69982]: _type = "Task" [ 1283.523045] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.532187] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.534824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.535189] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.655104] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520854a1-0c91-75b4-2002-c23cfde0910d, 'name': SearchDatastore_Task, 'duration_secs': 0.091283} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.655354] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.655620] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e778d67f-13de-4a50-9c46-137bdbfd4ddf/e778d67f-13de-4a50-9c46-137bdbfd4ddf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1283.655906] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59f75007-67b4-4c99-a3f5-9daa3f8ed431 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.663519] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1283.663519] env[69982]: value = "task-3865663" [ 1283.663519] env[69982]: _type = "Task" [ 1283.663519] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.672757] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865663, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.841466] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1283.993924] env[69982]: DEBUG nova.network.neutron [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.013184] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.033746] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.039114] env[69982]: DEBUG nova.compute.utils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1284.175343] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865663, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.497503] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.497900] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Instance network_info: |[{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1284.499096] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:7b:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27138a4c-60c9-45fb-bf37-4c2f765315a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc946213-769e-4998-bc51-5344491a5d10', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1284.507878] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1284.508277] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1284.512648] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05ce9454-77f7-46eb-b98c-4c9f9a1f1eb2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.537350] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.540168] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1284.540168] env[69982]: value = "task-3865664" [ 1284.540168] env[69982]: _type = "Task" [ 1284.540168] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.548063] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.548630] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.017872} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.549389] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1284.549583] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1284.549830] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1284.557562] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865664, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.676786] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865663, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.734759] env[69982]: DEBUG nova.compute.manager [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Received event network-changed-dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1284.735016] env[69982]: DEBUG nova.compute.manager [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Refreshing instance network info cache due to event network-changed-dc946213-769e-4998-bc51-5344491a5d10. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1284.735291] env[69982]: DEBUG oslo_concurrency.lockutils [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] Acquiring lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1284.735404] env[69982]: DEBUG oslo_concurrency.lockutils [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] Acquired lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.735550] env[69982]: DEBUG nova.network.neutron [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Refreshing network info cache for port dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1285.020186] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865659, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.59662} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.020435] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35/OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35.vmdk to [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk. 
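The task-3865659 entries above and below trace how the fetched image is promoted into the datastore image cache: a MoveVirtualDisk_Task relocates the staged OSTACK_IMG_* VMDK into devstack-image-cache_base/<image-id>/, oslo_vmware.api polls the task until it reports 100%, and the temporary staging directory is then deleted. A rough sketch of that call pattern against oslo.vmware follows (not Nova's implementation); it assumes a connected VMwareAPISession in `session` and a datacenter moref in `dc_ref`, both placeholders here, with the datastore paths copied from the log entries:

    # Sketch only: assumes `session` is an established oslo.vmware
    # VMwareAPISession and `dc_ref` is the datacenter managed object ref.
    TMP_DIR = "[datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35"
    SRC = TMP_DIR + "/OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35.vmdk"
    DST = ("[datastore2] devstack-image-cache_base/"
           "f2a59d2b-6bba-4229-910d-72dc8d0f9184/"
           "f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk")

    def cache_fetched_image(session, dc_ref):
        content = session.vim.service_content

        # MoveVirtualDisk_Task returns a Task moref immediately; wait_for_task
        # then polls its TaskInfo until it reaches success or error, which is
        # what produces the "MoveVirtualDisk_Task progress is N%" lines above.
        move_task = session.invoke_api(
            session.vim, "MoveVirtualDisk_Task", content.virtualDiskManager,
            sourceName=SRC, sourceDatacenter=dc_ref,
            destName=DST, destDatacenter=dc_ref, force=False)
        session.wait_for_task(move_task)

        # Once the disk sits in the cache, the temporary OSTACK_IMG_* staging
        # directory is removed (the FileManager.DeleteDatastoreFile_Task seen
        # in the entries that follow).
        delete_task = session.invoke_api(
            session.vim, "DeleteDatastoreFile_Task", content.fileManager,
            name=TMP_DIR, datacenter=dc_ref)
        session.wait_for_task(delete_task)

After the cached copy exists, later requests for the same image only take the image-cache lock and copy the cached VMDK into the instance directory, as the CopyVirtualDisk_Task entries for instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f show further down.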
[ 1285.020631] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Cleaning up location [datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1285.020798] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_1e1be922-650d-4e13-b2c0-4ffc851b4b35 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1285.021133] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57d6c50f-d3cc-4f55-884d-36923733565a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.028364] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1285.028364] env[69982]: value = "task-3865665" [ 1285.028364] env[69982]: _type = "Task" [ 1285.028364] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.038036] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.051692] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865664, 'name': CreateVM_Task} progress is 25%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.174985] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865663, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.280682} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.175444] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] e778d67f-13de-4a50-9c46-137bdbfd4ddf/e778d67f-13de-4a50-9c46-137bdbfd4ddf.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.175853] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1285.175853] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97b4c154-4d57-4856-8662-917d4f0f1d3c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.183272] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1285.183272] env[69982]: value = "task-3865666" [ 1285.183272] env[69982]: _type = "Task" [ 1285.183272] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.194402] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865666, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.443458] env[69982]: DEBUG nova.network.neutron [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updated VIF entry in instance network info cache for port dc946213-769e-4998-bc51-5344491a5d10. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1285.443845] env[69982]: DEBUG nova.network.neutron [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.542673] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071402} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.547376] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1285.547637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.547992] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk to [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1285.548394] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ed4f01c-0827-4e07-8fb9-3a229513bbb1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.559856] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865664, 'name': CreateVM_Task, 'duration_secs': 0.79026} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.561950] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1285.562461] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1285.562461] env[69982]: value = "task-3865667" [ 1285.562461] env[69982]: _type = "Task" [ 1285.562461] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.563423] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.563722] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.564247] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1285.568549] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0837bfbd-2c4a-4961-ae2f-c4038aca20c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.580775] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1285.580775] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52859086-a31c-59a6-45d7-7a83541f4141" [ 1285.580775] env[69982]: _type = "Task" [ 1285.580775] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.585657] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.595920] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52859086-a31c-59a6-45d7-7a83541f4141, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.598173] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1285.598416] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1285.598578] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1285.598762] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1285.598909] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1285.599078] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1285.599288] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1285.599450] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1285.599615] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 
tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1285.599783] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1285.599938] env[69982]: DEBUG nova.virt.hardware [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1285.600742] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e87d99-5f56-4359-ac90-984376ad4ae5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.608745] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594006d2-d401-41f2-a315-e2df20d3602c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.614783] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.615114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.615392] env[69982]: INFO nova.compute.manager [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Attaching volume d30450bb-117a-4b19-9de9-95fbe14d21e2 to /dev/sdb [ 1285.626344] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance VIF info [] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1285.632175] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.635171] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1285.635604] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de444939-8d17-44f6-887a-e1f0beaf892d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.652553] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1285.652553] env[69982]: value = "task-3865668" [ 1285.652553] env[69982]: _type = "Task" [ 1285.652553] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.660985] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865668, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.669024] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71641ce8-97ba-4060-ada1-cdc09f7e68c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.673856] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4dee9e-b973-45ac-b7ce-5db5941e2522 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.691155] env[69982]: DEBUG nova.virt.block_device [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updating existing volume attachment record: 911efa2d-2a0e-49f7-9761-e58cd8f23a45 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1285.699021] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865666, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072679} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.699312] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1285.700151] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92170d28-d5de-4c73-a85e-d333349c1c8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.723988] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] e778d67f-13de-4a50-9c46-137bdbfd4ddf/e778d67f-13de-4a50-9c46-137bdbfd4ddf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.724335] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29cb561d-610c-43ff-a71a-3a5dd1dc59b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.745993] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1285.745993] env[69982]: value = "task-3865669" [ 1285.745993] env[69982]: _type = "Task" [ 1285.745993] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.754525] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.946753] env[69982]: DEBUG oslo_concurrency.lockutils [req-6789a5da-6e27-48c6-a468-252903d2261d req-632e44fd-21e4-404f-af16-c0ee91b93cbb service nova] Releasing lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.074920] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.095693] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52859086-a31c-59a6-45d7-7a83541f4141, 'name': SearchDatastore_Task, 'duration_secs': 0.030194} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.096088] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.096454] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1286.096759] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.096931] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.097180] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1286.097508] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e43405c-9f4a-474b-8db5-a02e32059acf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.106336] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1286.106528] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1286.107283] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a34ef357-9875-4c68-a88c-cb38cb75d4e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.117119] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1286.117119] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213a9a3-a02a-b6ec-3e3f-f2a969d08341" [ 1286.117119] env[69982]: _type = "Task" [ 1286.117119] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.126920] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213a9a3-a02a-b6ec-3e3f-f2a969d08341, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.164208] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865668, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.256647] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865669, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.575998] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.630256] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5213a9a3-a02a-b6ec-3e3f-f2a969d08341, 'name': SearchDatastore_Task, 'duration_secs': 0.092786} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.631189] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fcdbfe7-954d-454b-b165-2239c822d735 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.638209] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1286.638209] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fab7dc-76ea-702d-a126-7492f07e50bd" [ 1286.638209] env[69982]: _type = "Task" [ 1286.638209] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.648584] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fab7dc-76ea-702d-a126-7492f07e50bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.665914] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865668, 'name': CreateVM_Task, 'duration_secs': 0.558888} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.666142] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1286.666590] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1286.666756] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1286.667141] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1286.667423] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7702c307-6970-48f4-8a2a-0136723aeeea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.673715] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1286.673715] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251b1d1-069a-7d0e-d59a-8b646c59ac09" [ 1286.673715] env[69982]: _type = "Task" [ 1286.673715] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.683135] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251b1d1-069a-7d0e-d59a-8b646c59ac09, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.756709] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865669, 'name': ReconfigVM_Task, 'duration_secs': 0.89838} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.757092] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfigured VM instance instance-00000079 to attach disk [datastore2] e778d67f-13de-4a50-9c46-137bdbfd4ddf/e778d67f-13de-4a50-9c46-137bdbfd4ddf.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1286.757713] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a36d2f63-05d6-4ae5-a896-df1c250cd949 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.765679] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1286.765679] env[69982]: value = "task-3865671" [ 1286.765679] env[69982]: _type = "Task" [ 1286.765679] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.775121] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865671, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.075466] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.151656] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fab7dc-76ea-702d-a126-7492f07e50bd, 'name': SearchDatastore_Task, 'duration_secs': 0.097803} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.151913] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.152158] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 2cbd6771-48dd-44bc-a0e3-96dad0a7aead/2cbd6771-48dd-44bc-a0e3-96dad0a7aead.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1287.152447] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ee9979c-a098-4182-bae1-04090ac3cbab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.161367] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1287.161367] env[69982]: value = "task-3865672" [ 1287.161367] env[69982]: _type = "Task" [ 1287.161367] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.170466] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.183233] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251b1d1-069a-7d0e-d59a-8b646c59ac09, 'name': SearchDatastore_Task, 'duration_secs': 0.015813} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.183525] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1287.183766] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1287.183998] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.184162] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.184356] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1287.184622] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb8dc616-6a50-4d65-94d6-3275b33d00a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.195481] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1287.195645] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1287.196458] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2deeedba-6068-4714-be94-01c1b00701da {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.202967] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1287.202967] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e13100-2a01-e0db-bee9-dadaf73c237e" [ 1287.202967] env[69982]: _type = "Task" [ 1287.202967] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.212588] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e13100-2a01-e0db-bee9-dadaf73c237e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.276864] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865671, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.576725] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.671864] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.714403] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52e13100-2a01-e0db-bee9-dadaf73c237e, 'name': SearchDatastore_Task, 'duration_secs': 0.017157} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.715416] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0484051-1360-4f4b-a98f-544249107ed7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.721973] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1287.721973] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52112250-e9b5-754d-0782-2c93508148a7" [ 1287.721973] env[69982]: _type = "Task" [ 1287.721973] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.731496] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52112250-e9b5-754d-0782-2c93508148a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.778392] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865671, 'name': Rename_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.077613] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.172766] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.234136] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52112250-e9b5-754d-0782-2c93508148a7, 'name': SearchDatastore_Task, 'duration_secs': 0.019183} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.234438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1288.234708] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1288.235047] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9a9c325-d943-4e63-99cb-83a213531c8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.244345] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1288.244345] env[69982]: value = "task-3865674" [ 1288.244345] env[69982]: _type = "Task" [ 1288.244345] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.255212] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.278380] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865671, 'name': Rename_Task, 'duration_secs': 1.248687} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.278814] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1288.279117] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70decd76-9581-4b1d-9979-22faeb50364e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.286816] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1288.286816] env[69982]: value = "task-3865675" [ 1288.286816] env[69982]: _type = "Task" [ 1288.286816] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.296338] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.580350] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865667, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.809025} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.580695] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/f2a59d2b-6bba-4229-910d-72dc8d0f9184/f2a59d2b-6bba-4229-910d-72dc8d0f9184.vmdk to [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1288.581566] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5c4fa1-d1a8-4f87-a715-8e861389fa4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.608137] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1288.608480] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfb2d1ec-b6e6-479b-8981-b528e66ca70b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.631259] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1288.631259] env[69982]: value = "task-3865676" [ 1288.631259] env[69982]: _type = "Task" [ 1288.631259] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.642040] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865676, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.675542] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865672, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.755640] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865674, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.798194] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865675, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.141410] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865676, 'name': ReconfigVM_Task, 'duration_secs': 0.410482} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.141699] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f/7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.142830] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'device_name': '/dev/sda', 'encryption_format': None, 'size': 0, 'guest_format': None, 'encrypted': False, 'encryption_options': None, 'boot_index': 0, 'encryption_secret_uuid': None, 'disk_bus': None, 'image_id': 'a4e69d6f-1c15-4f57-92a8-5e81c6be8172'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'mount_device': '/dev/sdb', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768114', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'name': 'volume-faf3518e-f73e-4b06-a226-42e4c81f1235', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f', 'attached_at': '', 'detached_at': '', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'serial': 'faf3518e-f73e-4b06-a226-42e4c81f1235'}, 'guest_format': None, 'attachment_id': 'f4066339-102a-4787-bcf5-5821d5a49157', 'boot_index': None, 'disk_bus': None, 'volume_type': 
None}], 'swap': None} {{(pid=69982) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1289.143048] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Volume attach. Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1289.143270] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768114', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'name': 'volume-faf3518e-f73e-4b06-a226-42e4c81f1235', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f', 'attached_at': '', 'detached_at': '', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'serial': 'faf3518e-f73e-4b06-a226-42e4c81f1235'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1289.144103] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae357c5c-ca7d-4854-8999-a920e355b13c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.160414] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53144f9a-6eee-46cb-8819-707b9fc31237 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.186945] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] volume-faf3518e-f73e-4b06-a226-42e4c81f1235/volume-faf3518e-f73e-4b06-a226-42e4c81f1235.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.187794] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26613fea-86de-496f-9794-910dacbeed22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.204429] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865672, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.594285} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.205275] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore2] 2cbd6771-48dd-44bc-a0e3-96dad0a7aead/2cbd6771-48dd-44bc-a0e3-96dad0a7aead.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1289.205548] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1289.205854] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0b7b23d-e6ca-4626-9aa0-4d19c92aee8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.211951] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1289.211951] env[69982]: value = "task-3865677" [ 1289.211951] env[69982]: _type = "Task" [ 1289.211951] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.213283] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1289.213283] env[69982]: value = "task-3865678" [ 1289.213283] env[69982]: _type = "Task" [ 1289.213283] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.228447] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865677, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.231952] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865678, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.255400] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790125} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.255675] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1289.255895] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1289.256183] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5de16958-2e1d-4554-aebe-89b3ae2bcf13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.263841] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1289.263841] env[69982]: value = "task-3865679" [ 1289.263841] env[69982]: _type = "Task" [ 1289.263841] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.273083] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865679, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.297694] env[69982]: DEBUG oslo_vmware.api [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865675, 'name': PowerOnVM_Task, 'duration_secs': 0.813085} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.297977] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1289.298203] env[69982]: INFO nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Took 9.81 seconds to spawn the instance on the hypervisor. 
[ 1289.298380] env[69982]: DEBUG nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1289.299226] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494e76d9-4635-4805-bdb4-765614fc87cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.726516] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865677, 'name': ReconfigVM_Task, 'duration_secs': 0.332155} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.729538] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfigured VM instance instance-00000072 to attach disk [datastore2] volume-faf3518e-f73e-4b06-a226-42e4c81f1235/volume-faf3518e-f73e-4b06-a226-42e4c81f1235.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1289.734486] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865678, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084256} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.734707] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3224050-7ae7-46f4-8d8f-1f77e70add13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.744280] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1289.744922] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec66327-79e8-4f54-ad12-fb0744b6cc2a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.767011] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 2cbd6771-48dd-44bc-a0e3-96dad0a7aead/2cbd6771-48dd-44bc-a0e3-96dad0a7aead.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.768404] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65ade597-7c1a-4a78-860d-031dbe19929c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.782811] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1289.782811] env[69982]: value = "task-3865680" [ 1289.782811] env[69982]: _type = "Task" [ 1289.782811] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.791787] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1289.791787] env[69982]: value = "task-3865681" [ 1289.791787] env[69982]: _type = "Task" [ 1289.791787] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.797527] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865679, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06946} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.797744] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865680, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.800618] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1289.801384] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f74bae6-364e-4644-b880-ebb580ba2108 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.808835] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865681, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.831012] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1289.832999] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45c9d3bc-7a73-432f-880b-e9afbbd6ea92 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.850054] env[69982]: INFO nova.compute.manager [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Took 18.80 seconds to build instance. [ 1289.856532] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1289.856532] env[69982]: value = "task-3865682" [ 1289.856532] env[69982]: _type = "Task" [ 1289.856532] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.866393] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865682, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.239101] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1290.239397] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768124', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'name': 'volume-d30450bb-117a-4b19-9de9-95fbe14d21e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb226b65-d91f-4216-9844-37c22d3705a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'serial': 'd30450bb-117a-4b19-9de9-95fbe14d21e2'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1290.240364] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9509305-3171-4650-af78-90d18dbad1e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.260622] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d6996e8-ad17-48a0-886a-93a369de3bcd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.287332] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-d30450bb-117a-4b19-9de9-95fbe14d21e2/volume-d30450bb-117a-4b19-9de9-95fbe14d21e2.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1290.287743] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-485e66cc-b191-4739-b770-32178348e5d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.313921] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865681, 'name': ReconfigVM_Task, 'duration_secs': 0.343704} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.318023] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 2cbd6771-48dd-44bc-a0e3-96dad0a7aead/2cbd6771-48dd-44bc-a0e3-96dad0a7aead.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1290.318566] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865680, 'name': ReconfigVM_Task, 'duration_secs': 0.150091} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.318845] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1290.318845] env[69982]: value = "task-3865683" [ 1290.318845] env[69982]: _type = "Task" [ 1290.318845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.319052] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc39d2ee-e110-433c-afad-f616e62cb721 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.320758] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768114', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'name': 'volume-faf3518e-f73e-4b06-a226-42e4c81f1235', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f', 'attached_at': '', 'detached_at': '', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'serial': 'faf3518e-f73e-4b06-a226-42e4c81f1235'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1290.321320] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9e3f699-7b8f-48db-b92e-fd40ebe671cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.333153] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865683, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.335134] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1290.335134] env[69982]: value = "task-3865685" [ 1290.335134] env[69982]: _type = "Task" [ 1290.335134] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.335467] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1290.335467] env[69982]: value = "task-3865684" [ 1290.335467] env[69982]: _type = "Task" [ 1290.335467] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.347773] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865685, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.351232] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865684, 'name': Rename_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.352793] env[69982]: DEBUG oslo_concurrency.lockutils [None req-dd9c988a-7d24-4273-bfce-ad30c6bf47da tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.309s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.368993] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865682, 'name': ReconfigVM_Task, 'duration_secs': 0.298541} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.369319] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 8a2c1c8c-a2a8-482b-9004-41971ed2b493/8a2c1c8c-a2a8-482b-9004-41971ed2b493.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1290.370036] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5841ecb8-4120-4258-aa9e-c6e2623be1d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.376910] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1290.376910] env[69982]: value = "task-3865686" [ 1290.376910] env[69982]: _type = "Task" [ 1290.376910] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.390873] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865686, 'name': Rename_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.779229] env[69982]: DEBUG nova.compute.manager [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1290.779502] env[69982]: DEBUG nova.compute.manager [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1290.779934] env[69982]: DEBUG oslo_concurrency.lockutils [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1290.779934] env[69982]: DEBUG oslo_concurrency.lockutils [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1290.779934] env[69982]: DEBUG nova.network.neutron [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1290.832488] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865683, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.848200] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865685, 'name': Rename_Task, 'duration_secs': 0.182986} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.851425] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.852085] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865684, 'name': Rename_Task, 'duration_secs': 0.203487} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.852085] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af8567fe-a1ab-40fb-a618-d0d063849ccc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.853708] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.853938] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5680dcad-bca8-4d6e-becc-3c9bd41b02f1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.862073] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1290.862073] env[69982]: value = "task-3865688" [ 1290.862073] env[69982]: _type = "Task" [ 1290.862073] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.863680] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1290.863680] env[69982]: value = "task-3865687" [ 1290.863680] env[69982]: _type = "Task" [ 1290.863680] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.877169] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865687, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.880705] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865688, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.889590] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865686, 'name': Rename_Task, 'duration_secs': 0.156527} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.889866] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.890139] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf08d9fc-3c0e-461d-949f-2319a6f298d9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.897356] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1290.897356] env[69982]: value = "task-3865689" [ 1290.897356] env[69982]: _type = "Task" [ 1290.897356] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.906883] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.332704] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865683, 'name': ReconfigVM_Task, 'duration_secs': 0.76467} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.332996] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-d30450bb-117a-4b19-9de9-95fbe14d21e2/volume-d30450bb-117a-4b19-9de9-95fbe14d21e2.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1291.338114] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e659d14-7de1-4ef5-9abf-f4ad831fda90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.358408] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1291.358408] env[69982]: value = "task-3865690" [ 1291.358408] env[69982]: _type = "Task" [ 1291.358408] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.368630] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865690, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.379997] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865688, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.383501] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865687, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.415031] env[69982]: DEBUG oslo_vmware.api [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865689, 'name': PowerOnVM_Task, 'duration_secs': 0.507284} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.415031] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1291.415813] env[69982]: DEBUG nova.compute.manager [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1291.419023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef42a1de-d688-4947-a5b3-ad6ac6ac7894 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.525603] env[69982]: DEBUG nova.network.neutron [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1291.526077] env[69982]: DEBUG nova.network.neutron [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.749802] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.750043] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.871353] env[69982]: DEBUG oslo_vmware.api [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865690, 'name': ReconfigVM_Task, 'duration_secs': 0.235108} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.875161] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768124', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'name': 'volume-d30450bb-117a-4b19-9de9-95fbe14d21e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb226b65-d91f-4216-9844-37c22d3705a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'serial': 'd30450bb-117a-4b19-9de9-95fbe14d21e2'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1291.890135] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865688, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.896093] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865687, 'name': PowerOnVM_Task} progress is 79%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.942899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1291.943345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.943666] env[69982]: DEBUG nova.objects.instance [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69982) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1292.029354] env[69982]: DEBUG oslo_concurrency.lockutils [req-df330ed4-d6de-4922-af16-acf4538c1bab req-ec531995-865f-4029-8d23-45108f213490 service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.252852] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Starting 
instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1292.389815] env[69982]: DEBUG oslo_vmware.api [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865687, 'name': PowerOnVM_Task, 'duration_secs': 1.322734} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.392198] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1292.392457] env[69982]: INFO nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Took 10.03 seconds to spawn the instance on the hypervisor. [ 1292.392608] env[69982]: DEBUG nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1292.392978] env[69982]: DEBUG oslo_vmware.api [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865688, 'name': PowerOnVM_Task, 'duration_secs': 1.19164} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.393718] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf71ae72-c892-49b1-bb33-bc0b89dd5b7d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.396481] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1292.502505] env[69982]: DEBUG nova.compute.manager [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1292.503644] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf71ff0e-c11a-4e51-b260-db7ff6d63907 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.775944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.916366] env[69982]: INFO nova.compute.manager [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Took 14.83 seconds to build instance. 
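The entries above repeat one pattern for every vCenter operation in flight (ReconfigVM_Task, Rename_Task, PowerOnVM_Task): wait_for_task registers a Task handle, then _poll_task re-reads the task state until it logs "completed successfully" with a duration_secs. Below is a minimal sketch of that polling loop, assuming a hypothetical get_task_info() helper in place of the real PropertyCollector round-trip; it is illustrative only, not the oslo.vmware implementation.

import time

def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Block until a vSphere task finishes, echoing the progress lines above."""
    while True:
        info = get_task_info(task_ref)  # hypothetical helper: returns the task's
                                        # 'info' property (state, progress, error, result)
        if info.state == 'success':
            print(f"Task {task_ref} completed successfully.")
            return info.result
        if info.state == 'error':
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        # still 'queued' or 'running': report intermediate progress
        print(f"Task {task_ref} progress is {info.progress or 0}%.")
        time.sleep(poll_interval)

In oslo.vmware this loop is driven by the session's task poll interval, which is why each task shows a few intermediate "progress is N%" lines before its final completion entry.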
[ 1292.937240] env[69982]: DEBUG nova.objects.instance [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid cb226b65-d91f-4216-9844-37c22d3705a7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.945542] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.945845] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.946177] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1292.946616] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.946616] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.949057] env[69982]: INFO nova.compute.manager [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Terminating instance [ 1292.953339] env[69982]: DEBUG oslo_concurrency.lockutils [None req-26dbbebc-6571-4f30-bafa-4203c51f7a19 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.954689] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.179s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.956256] env[69982]: INFO nova.compute.claims [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1293.021637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9dabeb34-2941-4a55-98ad-74eca85c2796 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 32.560s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.418701] env[69982]: DEBUG oslo_concurrency.lockutils [None req-019aecc3-cdd3-429b-b222-53eddea72f88 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.348s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.441714] env[69982]: DEBUG oslo_concurrency.lockutils [None req-77c1627f-06f7-464e-a316-0bc05ce9110e tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.827s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.452699] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "refresh_cache-8a2c1c8c-a2a8-482b-9004-41971ed2b493" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.452980] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "refresh_cache-8a2c1c8c-a2a8-482b-9004-41971ed2b493" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.453291] env[69982]: DEBUG nova.network.neutron [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1293.601961] env[69982]: DEBUG nova.compute.manager [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Received event network-changed-dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1293.602137] env[69982]: DEBUG nova.compute.manager [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] [instance: 
2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Refreshing instance network info cache due to event network-changed-dc946213-769e-4998-bc51-5344491a5d10. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1293.602366] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] Acquiring lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1293.602484] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] Acquired lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1293.602646] env[69982]: DEBUG nova.network.neutron [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Refreshing network info cache for port dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1293.975747] env[69982]: DEBUG nova.network.neutron [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1294.027468] env[69982]: DEBUG nova.network.neutron [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.100516] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ba207c-76b7-45d7-a97f-e4e7bacc504f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.111860] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06e7a9f-49eb-4c6c-91fa-a184f0768643 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.144857] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb110a0-ac58-445f-9b42-7ba012098838 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.157017] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aef5d4-92ab-4971-aa1b-7d843b168e04 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.162716] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.162716] 
env[69982]: DEBUG oslo_concurrency.lockutils [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.174551] env[69982]: DEBUG nova.compute.provider_tree [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1294.328943] env[69982]: DEBUG nova.network.neutron [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updated VIF entry in instance network info cache for port dc946213-769e-4998-bc51-5344491a5d10. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1294.329365] env[69982]: DEBUG nova.network.neutron [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.529974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "refresh_cache-8a2c1c8c-a2a8-482b-9004-41971ed2b493" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.530481] 
env[69982]: DEBUG nova.compute.manager [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1294.530678] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1294.534208] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a60ec7b-3984-476d-aea6-8f6033ada4d2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.542864] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1294.543210] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4724dd2-d6c5-4948-9c2f-def6b9a2de8c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.551032] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1294.551032] env[69982]: value = "task-3865691" [ 1294.551032] env[69982]: _type = "Task" [ 1294.551032] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.560602] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.665933] env[69982]: INFO nova.compute.manager [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Detaching volume d30450bb-117a-4b19-9de9-95fbe14d21e2 [ 1294.698407] env[69982]: ERROR nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [req-e3bcedf2-4ee1-449f-a1ef-6160a6671057] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 206a5498-2e79-46c1-a636-9488a05fb67d. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e3bcedf2-4ee1-449f-a1ef-6160a6671057"}]} [ 1294.703649] env[69982]: INFO nova.virt.block_device [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Attempting to driver detach volume d30450bb-117a-4b19-9de9-95fbe14d21e2 from mountpoint /dev/sdb [ 1294.703918] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1294.704203] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768124', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'name': 'volume-d30450bb-117a-4b19-9de9-95fbe14d21e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb226b65-d91f-4216-9844-37c22d3705a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'serial': 'd30450bb-117a-4b19-9de9-95fbe14d21e2'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1294.705049] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fed1004-323f-4bb8-9bc3-babfa62f2bc5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.728545] env[69982]: DEBUG nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing inventories for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1294.731392] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b070c137-3b2b-41c7-8b94-9b2ee34cb6f6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.739738] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb32490-34f0-433e-8622-ab89a78845ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.762550] env[69982]: DEBUG nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating ProviderTree inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1294.762875] env[69982]: DEBUG nova.compute.provider_tree [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1294.765496] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a906b147-2443-4d23-94a4-8ab857413fbe {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.781943] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] The volume has not been displaced from its original location: [datastore1] volume-d30450bb-117a-4b19-9de9-95fbe14d21e2/volume-d30450bb-117a-4b19-9de9-95fbe14d21e2.vmdk. No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1294.787913] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1294.789085] env[69982]: DEBUG nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing aggregate associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, aggregates: None {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1294.791162] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4e6ddab-b99c-42e1-8671-06dba0222f7b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.811708] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1294.811708] env[69982]: value = "task-3865692" [ 1294.811708] env[69982]: _type = "Task" [ 1294.811708] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.821245] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865692, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.824893] env[69982]: DEBUG nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Refreshing trait associations for resource provider 206a5498-2e79-46c1-a636-9488a05fb67d, traits: COMPUTE_NET_ATTACH_INTERFACE,HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO {{(pid=69982) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1294.832639] env[69982]: DEBUG oslo_concurrency.lockutils [req-0fea1ce0-05a4-4239-bebb-8e79aeef1878 req-53ae31f6-0b7a-4978-873b-f130a5058eb8 service nova] Releasing lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1294.965435] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9e6a61-fa42-430d-81ed-db3a25bf1386 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.974755] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73a13f1-d4d8-41d8-a3fb-e4ceb5e12093 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.014449] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7135c978-6ef2-4c97-8aec-2f5e7428c93e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.023603] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42dcb20-0f88-4557-8fbd-abc5c0ae2281 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.038985] env[69982]: DEBUG nova.compute.provider_tree [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1295.061146] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865691, 'name': PowerOffVM_Task, 'duration_secs': 0.118161} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.061429] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1295.061595] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1295.061853] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7a5a1be-5c2c-4373-a48c-acdab256fdb9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.089182] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1295.089425] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1295.089607] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleting the datastore file [datastore1] 8a2c1c8c-a2a8-482b-9004-41971ed2b493 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1295.089884] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09722b88-ce06-4cf2-969f-3aa6379a258d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.096870] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1295.096870] env[69982]: value = "task-3865694" [ 1295.096870] env[69982]: _type = "Task" [ 1295.096870] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.106402] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865694, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.321847] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865692, 'name': ReconfigVM_Task, 'duration_secs': 0.246821} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.322093] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1295.326897] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33ae7427-0b52-4068-b41c-8041802bf6ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.345693] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1295.345693] env[69982]: value = "task-3865695" [ 1295.345693] env[69982]: _type = "Task" [ 1295.345693] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.357233] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865695, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.571461] env[69982]: DEBUG nova.scheduler.client.report [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 175 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1295.571744] env[69982]: DEBUG nova.compute.provider_tree [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 175 to 176 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1295.571960] env[69982]: DEBUG nova.compute.provider_tree [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1295.606990] env[69982]: DEBUG oslo_vmware.api [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865694, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098325} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.607283] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1295.607460] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1295.607633] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1295.607803] env[69982]: INFO nova.compute.manager [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1295.608059] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1295.608265] env[69982]: DEBUG nova.compute.manager [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1295.608373] env[69982]: DEBUG nova.network.neutron [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1295.629780] env[69982]: DEBUG nova.network.neutron [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1295.855995] env[69982]: DEBUG oslo_vmware.api [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865695, 'name': ReconfigVM_Task, 'duration_secs': 0.253025} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.856337] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768124', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'name': 'volume-d30450bb-117a-4b19-9de9-95fbe14d21e2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb226b65-d91f-4216-9844-37c22d3705a7', 'attached_at': '', 'detached_at': '', 'volume_id': 'd30450bb-117a-4b19-9de9-95fbe14d21e2', 'serial': 'd30450bb-117a-4b19-9de9-95fbe14d21e2'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1296.077472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.123s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.078145] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1296.132715] env[69982]: DEBUG nova.network.neutron [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.396714] env[69982]: DEBUG nova.objects.instance [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'flavor' on Instance uuid cb226b65-d91f-4216-9844-37c22d3705a7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.583153] env[69982]: DEBUG nova.compute.utils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1296.584632] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1296.584801] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1296.621768] env[69982]: DEBUG nova.policy [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1296.635150] env[69982]: INFO nova.compute.manager [-] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Took 1.03 seconds to deallocate network for instance. [ 1296.947178] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Successfully created port: 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1297.087796] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1297.144262] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.144545] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.144770] env[69982]: DEBUG nova.objects.instance [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lazy-loading 'resources' on Instance uuid 8a2c1c8c-a2a8-482b-9004-41971ed2b493 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1297.408498] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f9406991-0496-4918-9ab9-cfa99f4d8241 tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.779253] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a6737f-386e-448f-a437-e20e0fdad188 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.787184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7ba2e4-b688-4d96-9797-aa4c2e5151a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.820789] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708f0ecc-53c2-4e30-a26e-44b0df629e32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.829970] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f3c4fe-ac68-4d0c-8882-56e74820b77e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.846739] env[69982]: DEBUG nova.compute.provider_tree [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.098065] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1298.122381] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1298.122654] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.122810] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1298.123039] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.123152] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1298.123300] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1298.123510] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1298.123668] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1298.123835] env[69982]: DEBUG 
nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1298.124008] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1298.124186] env[69982]: DEBUG nova.virt.hardware [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1298.125126] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc442c56-12ac-46df-8364-81d8153bd87d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.133847] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a10ea01c-ca1f-4b66-b3b2-e72bb0ef277c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.240886] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.241178] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.241374] env[69982]: INFO nova.compute.manager [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Shelving [ 1298.324312] env[69982]: DEBUG nova.compute.manager [req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-vif-plugged-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.324533] env[69982]: DEBUG oslo_concurrency.lockutils [req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.324780] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.324922] env[69982]: DEBUG oslo_concurrency.lockutils [req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.325643] env[69982]: DEBUG nova.compute.manager [req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] No waiting events found dispatching network-vif-plugged-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1298.325853] env[69982]: WARNING nova.compute.manager [req-deaf3a19-7d04-4ad6-8f80-d1151ccd3bd9 req-fbacc60d-b943-4d49-830d-1a2d1f939747 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received unexpected event network-vif-plugged-93615399-3c22-4aa4-b3d0-f2c5cffc9368 for instance with vm_state building and task_state spawning. [ 1298.350602] env[69982]: DEBUG nova.scheduler.client.report [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 45, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1298.406348] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Successfully updated port: 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1298.417682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.417682] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.417899] 
env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.417988] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.418573] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.420512] env[69982]: INFO nova.compute.manager [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Terminating instance [ 1298.856438] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.712s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.879494] env[69982]: INFO nova.scheduler.client.report [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleted allocations for instance 8a2c1c8c-a2a8-482b-9004-41971ed2b493 [ 1298.909638] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.909811] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.909932] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1298.924743] env[69982]: DEBUG nova.compute.manager 
[None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1298.924743] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1298.926332] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea05c17-32e1-4b89-a0b1-d5d2abc25d09 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.935823] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1298.936046] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10ea16c5-2e6b-40c9-98e3-960787c256e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.943853] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1298.943853] env[69982]: value = "task-3865696" [ 1298.943853] env[69982]: _type = "Task" [ 1298.943853] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.952790] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865696, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.250775] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.251098] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95b2ea7d-89c1-4806-a9e4-5aa918181906 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.258787] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1299.258787] env[69982]: value = "task-3865697" [ 1299.258787] env[69982]: _type = "Task" [ 1299.258787] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.267821] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.389739] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f215f30-4c7d-4e69-bb1c-306cc2821afe tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "8a2c1c8c-a2a8-482b-9004-41971ed2b493" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.444s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.444436] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1299.455724] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865696, 'name': PowerOffVM_Task, 'duration_secs': 0.229032} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.455974] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.456171] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1299.456427] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7922822-6666-47b7-9d6b-81d4bc34fad9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.525849] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1299.526229] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1299.526494] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleting the datastore file [datastore1] cb226b65-d91f-4216-9844-37c22d3705a7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1299.526768] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-208fcf2f-b882-4d7f-9d3c-fdd83b36e80e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.533926] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for the task: (returnval){ [ 1299.533926] env[69982]: value = "task-3865699" [ 1299.533926] env[69982]: _type = "Task" [ 1299.533926] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.542632] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865699, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.594690] env[69982]: DEBUG nova.network.neutron [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.769588] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865697, 'name': PowerOffVM_Task, 'duration_secs': 0.279972} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.769930] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.770790] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28483fb-8bb1-465b-b520-ccc9ad546376 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.791078] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1bd933-6fe0-49ef-a647-1524d4206d8f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.044676] env[69982]: DEBUG oslo_vmware.api [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Task: {'id': task-3865699, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146033} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.044951] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1300.045221] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1300.045413] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1300.046016] env[69982]: INFO nova.compute.manager [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1300.046016] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1300.046016] env[69982]: DEBUG nova.compute.manager [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1300.046207] env[69982]: DEBUG nova.network.neutron [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1300.097570] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.097999] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Instance network_info: |[{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1300.099237] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:39:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fe38bb7e-8bcb-419d-868f-0dc105c69651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93615399-3c22-4aa4-b3d0-f2c5cffc9368', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.109316] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1300.109661] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1300.109930] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-969550c9-cad5-47e6-a462-de31ed603b24 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.143312] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.143312] env[69982]: value = "task-3865700" [ 1300.143312] env[69982]: _type = "Task" [ 1300.143312] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.152686] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865700, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.305191] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1300.305191] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d5dd8955-3777-4e12-af2f-c0739be82a8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.312286] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1300.312286] env[69982]: value = "task-3865701" [ 1300.312286] env[69982]: _type = "Task" [ 1300.312286] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.323549] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865701, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.361214] env[69982]: DEBUG nova.compute.manager [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1300.361880] env[69982]: DEBUG nova.compute.manager [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing instance network info cache due to event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1300.362560] env[69982]: DEBUG oslo_concurrency.lockutils [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.363031] env[69982]: DEBUG oslo_concurrency.lockutils [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.363509] env[69982]: DEBUG nova.network.neutron [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1300.553994] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.554230] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.554450] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "ffb343e2-b631-4ff8-9da4-e07462d126c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.554633] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.556089] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.558044] env[69982]: INFO nova.compute.manager [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 
tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Terminating instance [ 1300.653572] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865700, 'name': CreateVM_Task, 'duration_secs': 0.369595} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.653805] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1300.654471] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.654632] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.654982] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1300.655274] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9073fe79-2cfb-410f-a2c2-061ddddeb5db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.660852] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1300.660852] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b817e-72d8-0031-fddd-654632839584" [ 1300.660852] env[69982]: _type = "Task" [ 1300.660852] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.670718] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b817e-72d8-0031-fddd-654632839584, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.823570] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865701, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.064893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "refresh_cache-ffb343e2-b631-4ff8-9da4-e07462d126c7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.065181] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquired lock "refresh_cache-ffb343e2-b631-4ff8-9da4-e07462d126c7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.065395] env[69982]: DEBUG nova.network.neutron [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1301.101349] env[69982]: DEBUG nova.network.neutron [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updated VIF entry in instance network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1301.101770] env[69982]: DEBUG nova.network.neutron [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.126346] env[69982]: DEBUG nova.network.neutron [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.173148] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 
tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]520b817e-72d8-0031-fddd-654632839584, 'name': SearchDatastore_Task, 'duration_secs': 0.012482} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.173507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.173768] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1301.174018] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1301.174177] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1301.174364] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1301.174634] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-779ab4bb-d182-431a-bfcc-4dd6426eea74 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.185610] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1301.185811] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1301.186673] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7eec149-bbaf-44ce-9f7f-98c233920ed9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.192665] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1301.192665] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cf1727-6545-5c86-dc2a-2a74e1436add" [ 1301.192665] env[69982]: _type = "Task" [ 1301.192665] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.202852] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cf1727-6545-5c86-dc2a-2a74e1436add, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.324687] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865701, 'name': CreateSnapshot_Task, 'duration_secs': 0.819473} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.325027] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1301.325945] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763141ed-a316-462e-b4b6-9e63537a1be0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.585489] env[69982]: DEBUG nova.network.neutron [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1301.604963] env[69982]: DEBUG oslo_concurrency.lockutils [req-c25e380b-1f1b-4b86-a96c-b312edb82af2 req-25e6b265-a8ed-4dbb-8bfa-063be20c13dd service nova] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.629686] env[69982]: INFO nova.compute.manager [-] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Took 1.58 seconds to deallocate network for instance. 
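The CreateVM_Task, CreateSnapshot_Task and SearchDatastore_Task entries above all follow the same oslo_vmware shape: a vSphere task is submitted, its progress is polled ("progress is 0%" ... "100%"), and the final poll reports "completed successfully" together with duration_secs. A minimal, self-contained sketch of that polling loop, assuming a hypothetical poll_fn that returns the current task state (this is only the shape of the pattern, not the oslo_vmware implementation):

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300):
        """Poll `poll_fn` until the task it describes reaches a terminal state.

        `poll_fn` is assumed (hypothetically) to return a dict such as
        {'state': 'running', 'progress': 42} and eventually
        {'state': 'success'} or {'state': 'error', 'message': '...'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('message', 'task failed'))
            # Corresponds to the DEBUG lines above: "... progress is N%."
            print('progress is %s%%' % info.get('progress', 0))
            time.sleep(interval)
        raise TimeoutError('task did not complete within %ss' % timeout)
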
[ 1301.635689] env[69982]: DEBUG nova.network.neutron [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.704067] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52cf1727-6545-5c86-dc2a-2a74e1436add, 'name': SearchDatastore_Task, 'duration_secs': 0.011181} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.704905] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4a7bc9-0f50-4aa2-86ab-bbd00a92814b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.711301] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1301.711301] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52124974-0132-d195-e0b0-b532e6ad9ad4" [ 1301.711301] env[69982]: _type = "Task" [ 1301.711301] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.721088] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52124974-0132-d195-e0b0-b532e6ad9ad4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.844018] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1301.844318] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a80441be-9466-446d-b86d-46675fee2412 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.853771] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1301.853771] env[69982]: value = "task-3865702" [ 1301.853771] env[69982]: _type = "Task" [ 1301.853771] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.861742] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865702, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.138198] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.138461] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.138690] env[69982]: DEBUG nova.objects.instance [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lazy-loading 'resources' on Instance uuid cb226b65-d91f-4216-9844-37c22d3705a7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.140660] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Releasing lock "refresh_cache-ffb343e2-b631-4ff8-9da4-e07462d126c7" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.140894] env[69982]: DEBUG nova.compute.manager [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1302.141227] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1302.142596] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bd19b4-c6ae-40e8-9449-f65ebc361e47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.151674] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1302.152370] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b902bfb6-97ea-4c63-b83d-da45fdb46640 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.157984] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1302.157984] env[69982]: value = "task-3865703" [ 1302.157984] env[69982]: _type = "Task" [ 1302.157984] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.166739] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865703, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.222247] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52124974-0132-d195-e0b0-b532e6ad9ad4, 'name': SearchDatastore_Task, 'duration_secs': 0.01278} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.222553] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1302.222859] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c/889ff9c8-08a2-4249-ae5b-bc94bc16dc8c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1302.223243] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fd41342-b41b-479f-a348-685a16bad640 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.232047] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1302.232047] env[69982]: value = "task-3865704" [ 1302.232047] env[69982]: _type = "Task" [ 1302.232047] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.242320] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865704, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.364832] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865702, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.389226] env[69982]: DEBUG nova.compute.manager [req-faa5a314-2927-498d-a62e-9f72c9bb2f8e req-3ede5cf3-0dba-4b3b-a366-cc95c595a72d service nova] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Received event network-vif-deleted-d4331d9d-de6b-48c6-9e45-c68bc45f7209 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1302.669549] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865703, 'name': PowerOffVM_Task, 'duration_secs': 0.340651} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.672598] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1302.672787] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1302.673361] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c814b67-9069-4b87-af8b-a170f08dc8c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.703021] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1302.703332] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1302.703517] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleting the datastore file [datastore2] ffb343e2-b631-4ff8-9da4-e07462d126c7 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1302.703785] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3428228e-a8e4-418f-a13d-f2223c36fcb3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.710882] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for the task: (returnval){ [ 1302.710882] env[69982]: value = "task-3865706" [ 1302.710882] env[69982]: _type = "Task" [ 1302.710882] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.722970] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865706, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.741811] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462736} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.744430] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c/889ff9c8-08a2-4249-ae5b-bc94bc16dc8c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.744654] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.745117] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-792da877-8449-462e-a107-b8d08c4b1fc0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.751938] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1302.751938] env[69982]: value = "task-3865707" [ 1302.751938] env[69982]: _type = "Task" [ 1302.751938] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.763545] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865707, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.782241] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d498b0a6-4c07-48aa-b642-00c7ce0379a7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.790802] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f5d1b6-2902-4f37-83fa-39cad59b7d1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.822432] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd2c2ed-3417-4aec-8e22-f4effede9847 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.830577] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09af071-97d0-4034-9751-70f23b441a4d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.844728] env[69982]: DEBUG nova.compute.provider_tree [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1302.865130] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865702, 'name': CloneVM_Task} progress is 94%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.222901] env[69982]: DEBUG oslo_vmware.api [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Task: {'id': task-3865706, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.104561} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.223320] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.223628] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1303.223918] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1303.224242] env[69982]: INFO nova.compute.manager [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1303.224633] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1303.224932] env[69982]: DEBUG nova.compute.manager [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1303.225139] env[69982]: DEBUG nova.network.neutron [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1303.240715] env[69982]: DEBUG nova.network.neutron [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1303.262317] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066093} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.262577] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1303.263402] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0681434b-7ab4-4e2c-898a-6bd50f7a90b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.286338] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c/889ff9c8-08a2-4249-ae5b-bc94bc16dc8c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.286910] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73392895-ff94-4da5-98ec-abc284da048b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.306763] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1303.306763] env[69982]: value = "task-3865708" [ 1303.306763] env[69982]: _type = "Task" [ 1303.306763] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.315282] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865708, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.365613] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865702, 'name': CloneVM_Task, 'duration_secs': 1.228022} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.365990] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Created linked-clone VM from snapshot [ 1303.366910] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6706ecb-4553-41ff-9a4f-c4dcdb473f39 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.375698] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Uploading image a22d96e8-6538-4d03-8eea-5cd269aeb9d8 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1303.378793] env[69982]: DEBUG nova.scheduler.client.report [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Updated inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1303.379117] env[69982]: DEBUG nova.compute.provider_tree [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Updating resource provider 206a5498-2e79-46c1-a636-9488a05fb67d generation from 176 to 177 during operation: update_inventory {{(pid=69982) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1303.379549] env[69982]: DEBUG nova.compute.provider_tree [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Updating inventory in ProviderTree for provider 206a5498-2e79-46c1-a636-9488a05fb67d with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1303.404052] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1303.404052] env[69982]: value = "vm-768127" [ 1303.404052] env[69982]: _type = "VirtualMachine" [ 1303.404052] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1303.404328] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-77b38a33-77e6-4cce-b8ab-b45e5379e18c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.412315] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease: (returnval){ [ 1303.412315] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5223f1cb-831a-a850-7246-10e3d358aeaf" [ 1303.412315] env[69982]: _type = "HttpNfcLease" [ 1303.412315] env[69982]: } obtained for exporting VM: (result){ [ 1303.412315] env[69982]: value = "vm-768127" [ 1303.412315] env[69982]: _type = "VirtualMachine" [ 1303.412315] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1303.412630] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the lease: (returnval){ [ 1303.412630] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5223f1cb-831a-a850-7246-10e3d358aeaf" [ 1303.412630] env[69982]: _type = "HttpNfcLease" [ 1303.412630] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1303.420087] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1303.420087] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5223f1cb-831a-a850-7246-10e3d358aeaf" [ 1303.420087] env[69982]: _type = "HttpNfcLease" [ 1303.420087] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1303.743148] env[69982]: DEBUG nova.network.neutron [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.817569] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865708, 'name': ReconfigVM_Task, 'duration_secs': 0.278508} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.817839] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c/889ff9c8-08a2-4249-ae5b-bc94bc16dc8c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.818516] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff09bdfa-f760-4896-acea-faf89449fb1c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.826174] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1303.826174] env[69982]: value = "task-3865710" [ 1303.826174] env[69982]: _type = "Task" [ 1303.826174] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.834295] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865710, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.885019] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.904879] env[69982]: INFO nova.scheduler.client.report [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Deleted allocations for instance cb226b65-d91f-4216-9844-37c22d3705a7 [ 1303.920428] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1303.920428] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5223f1cb-831a-a850-7246-10e3d358aeaf" [ 1303.920428] env[69982]: _type = "HttpNfcLease" [ 1303.920428] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1303.920728] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1303.920728] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5223f1cb-831a-a850-7246-10e3d358aeaf" [ 1303.920728] env[69982]: _type = "HttpNfcLease" [ 1303.920728] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1303.921522] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d05454-3984-4ae4-9860-813b754e7891 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.930485] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1303.930759] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1304.027342] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-35a176cf-0ee4-42d6-befd-317914c6d9a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.133343] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1304.245560] env[69982]: INFO nova.compute.manager [-] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Took 1.02 seconds to deallocate network for instance. [ 1304.342676] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865710, 'name': Rename_Task, 'duration_secs': 0.15002} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.343148] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1304.343638] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e587632-c6fd-496e-a6c2-00d3748ee924 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.352933] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1304.352933] env[69982]: value = "task-3865711" [ 1304.352933] env[69982]: _type = "Task" [ 1304.352933] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.365538] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.413256] env[69982]: DEBUG oslo_concurrency.lockutils [None req-bcfd70fb-0ea5-48f9-98a7-22289e23f8ed tempest-AttachVolumeNegativeTest-1055724446 tempest-AttachVolumeNegativeTest-1055724446-project-member] Lock "cb226b65-d91f-4216-9844-37c22d3705a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.996s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.752829] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.753155] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.753382] env[69982]: DEBUG nova.objects.instance [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lazy-loading 'resources' on Instance uuid ffb343e2-b631-4ff8-9da4-e07462d126c7 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1304.864455] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865711, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.285296] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1305.366953] env[69982]: DEBUG oslo_vmware.api [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865711, 'name': PowerOnVM_Task, 'duration_secs': 0.924068} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.367354] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1305.367649] env[69982]: INFO nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Took 7.27 seconds to spawn the instance on the hypervisor. [ 1305.367872] env[69982]: DEBUG nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.368753] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaf233f-b916-4fa2-a0e6-44ab2e4c8a2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.373154] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb906e6-e4cd-49de-a0ea-f942a91213e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.383925] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1911a8-12f4-460d-bbad-9b01a39f4ec5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.422196] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac169e52-8520-424d-8df7-921f6a3b29c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.431184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d50cf8c-0d5a-4933-af29-878ab68e2775 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.449021] env[69982]: DEBUG nova.compute.provider_tree [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.899595] env[69982]: INFO nova.compute.manager [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Took 13.14 seconds to build instance. 
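The snapshot upload path above (CreateSnapshot_Task, CloneVM_Task for the linked clone, then ExportVm) ends with an HttpNfcLease whose lease info carries the disk-0.vmdk URL; the image is then read from that URL while HttpNfcLeaseProgress is invoked periodically to keep the lease alive. A rough sketch of that streaming step under those assumptions, with report_progress standing in for the lease-progress call and the URL supplied by the caller (illustrative names, not Nova's actual helpers):

    import urllib.request

    def stream_vmdk(vmdk_url, dest_path, total_bytes, report_progress,
                    chunk_size=1024 * 1024):
        """Read a VMDK from a lease URL in chunks, reporting percent done."""
        read = 0
        with urllib.request.urlopen(vmdk_url) as src, open(dest_path, 'wb') as dst:
            while True:
                chunk = src.read(chunk_size)
                if not chunk:
                    break
                dst.write(chunk)
                read += len(chunk)
                # Analogue of the HttpNfcLeaseProgress calls in the log,
                # which keep the export lease from timing out.
                report_progress(int(read * 100 / total_bytes))
        report_progress(100)
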
[ 1305.952944] env[69982]: DEBUG nova.scheduler.client.report [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.401595] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4090ab6a-7aa5-4e97-a4fb-96c14723c9ba tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.651s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.458880] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.483614] env[69982]: INFO nova.scheduler.client.report [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Deleted allocations for instance ffb343e2-b631-4ff8-9da4-e07462d126c7 [ 1306.993037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-3f983a04-cedd-494d-b4e5-844850903ad5 tempest-ServerShowV247Test-991955534 tempest-ServerShowV247Test-991955534-project-member] Lock "ffb343e2-b631-4ff8-9da4-e07462d126c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.438s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.280204] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.283984] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.284210] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1307.580571] env[69982]: DEBUG nova.compute.manager [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1307.580823] env[69982]: DEBUG nova.compute.manager [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1307.581022] env[69982]: DEBUG oslo_concurrency.lockutils [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.581180] env[69982]: DEBUG oslo_concurrency.lockutils [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.581347] env[69982]: DEBUG nova.network.neutron [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1308.285319] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1308.285319] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1308.596378] env[69982]: DEBUG nova.network.neutron [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1308.596791] env[69982]: DEBUG nova.network.neutron [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.100348] env[69982]: DEBUG oslo_concurrency.lockutils [req-6a467a57-8594-400a-a569-58fe31fc4194 req-cc20653c-34df-40e4-a8fd-318683a1f038 service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.284257] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.284537] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1309.610506] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1309.610962] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing instance network info cache due to event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1309.610962] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.611095] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.611271] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1309.788255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.788557] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1309.788774] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.788968] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1309.790179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af28a3b8-5401-44a3-864a-3e1756257be0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.799515] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d408f610-fb04-4b85-9ecc-c265d4764d36 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.816609] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6735fedd-167a-456d-87c5-9e5acc108a0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.824930] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74efe482-48b0-4f2a-b6ea-353b0f7a872e {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.856442] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180079MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1309.856649] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1309.856853] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1310.324602] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updated VIF entry in instance network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1310.324999] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.828040] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1310.828445] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 
889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1310.828490] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing instance network info cache due to event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1310.828748] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.828937] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1310.829126] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1310.884612] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance ce24e165-230a-44bb-ae46-d1479e71585a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.884800] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 6c20ba60-6552-47f3-8eb3-a71923a0a68f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.884936] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.885069] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance e778d67f-13de-4a50-9c46-137bdbfd4ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.885278] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 2cbd6771-48dd-44bc-a0e3-96dad0a7aead actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.885434] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1310.885605] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1310.885764] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1310.972073] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea82b40f-2a0a-4147-b074-4b0de82fc6e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.980633] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2021bc-cb05-48bc-b6f1-4f0f4eea1a8e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.018135] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063b2a86-ff7b-42fd-a545-35bb034359b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.026374] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba830379-dea5-42ec-846c-700452f90b94 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.040973] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.532995] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updated VIF entry in instance network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1311.533473] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.544705] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1312.036655] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1312.037057] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1312.037125] env[69982]: DEBUG nova.compute.manager [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1312.037310] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.037458] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.037623] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1312.049999] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1312.050572] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.193s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.701808] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1312.703056] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317307a8-d742-4798-be92-b37b9af49991 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.711645] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1312.711931] env[69982]: ERROR oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk due to incomplete transfer. 
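[editorial note, not part of the log] The "Inventory has not changed for provider ... based on inventory data: {...}" records above carry the placement inventory for this node. Assuming the usual placement capacity rule, capacity = (total - reserved) * allocation_ratio (an assumption about placement's accounting, not something stated in the log), the reported figures work out as in this small sketch; the inventory values themselves are copied from the log.

# Inventory as reported for provider 206a5498-2e79-46c1-a636-9488a05fb67d.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    # Assumed rule: usable capacity = (total - reserved) * allocation_ratio.
    return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

for rc, inv in inventory.items():
    print(rc, capacity(inv))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- the VCPU figure reflects the
# 4.0 allocation ratio applied to the 48 physical vCPUs the tracker reports.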
[ 1312.712288] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5d0947c2-f1dd-4a80-802e-61d76cf5f5ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.720706] env[69982]: DEBUG oslo_vmware.rw_handles [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e62d3c-c85c-08e0-a62f-7adc73059338/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1312.720922] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Uploaded image a22d96e8-6538-4d03-8eea-5cd269aeb9d8 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1312.723292] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1312.723708] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9b60d620-e57b-4265-9f3e-d9589b0552f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.731760] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1312.731760] env[69982]: value = "task-3865715" [ 1312.731760] env[69982]: _type = "Task" [ 1312.731760] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.741201] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865715, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.774147] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1312.774544] env[69982]: DEBUG nova.network.neutron [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.243028] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865715, 'name': Destroy_Task, 'duration_secs': 0.358234} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.243449] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Destroyed the VM [ 1313.243671] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1313.243995] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a143bb97-9f5f-494e-8e26-31370e4e7879 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.251932] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1313.251932] env[69982]: value = "task-3865716" [ 1313.251932] env[69982]: _type = "Task" [ 1313.251932] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.260628] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865716, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.277755] env[69982]: DEBUG oslo_concurrency.lockutils [req-90b2ec6b-d919-4992-979a-5967fca27f63 req-95af66d5-186a-4992-bf65-66638c74fc58 service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.762704] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865716, 'name': RemoveSnapshot_Task, 'duration_secs': 0.438691} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.762982] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1313.763287] env[69982]: DEBUG nova.compute.manager [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1313.764093] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dfb39b-3658-4b7f-a16c-4724cb31c015 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.047680] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1314.277662] env[69982]: INFO nova.compute.manager [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Shelve offloading [ 1314.782766] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1314.783300] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d160af12-5cf2-4c96-9cba-21ef4e4e5a08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.793685] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1314.793685] env[69982]: value = "task-3865717" [ 1314.793685] env[69982]: _type = "Task" [ 1314.793685] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.803528] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1314.803785] env[69982]: DEBUG nova.compute.manager [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1314.804577] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef47c2b2-20b6-458b-a3d7-4b4f4337970b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.811122] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.811285] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1314.811456] env[69982]: DEBUG nova.network.neutron [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1315.598737] env[69982]: DEBUG nova.network.neutron [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1316.103128] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.318115] env[69982]: DEBUG nova.compute.manager [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-vif-unplugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1316.318581] env[69982]: DEBUG oslo_concurrency.lockutils [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.319029] env[69982]: DEBUG oslo_concurrency.lockutils [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.319435] env[69982]: DEBUG oslo_concurrency.lockutils [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1316.319842] env[69982]: DEBUG nova.compute.manager [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] No waiting events found dispatching network-vif-unplugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1316.320242] env[69982]: WARNING nova.compute.manager [req-fce57c79-7516-459c-9e67-9116bafcde5f req-607822d2-5940-404d-af84-4a6e31aabbb0 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received unexpected event network-vif-unplugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d for instance with vm_state shelved and task_state shelving_offloading. 
[ 1316.432018] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1316.432938] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9e79ae-72ca-45d5-b924-aa6136d88907 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.441585] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1316.441841] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2b8fd9a-0077-4183-bbd6-bdd4cf94cf73 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.513863] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1316.514122] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1316.514350] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleting the datastore file [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1316.514637] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fd27311-c8b1-45ee-b76e-b8782ac79fc2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.523903] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1316.523903] env[69982]: value = "task-3865719" [ 1316.523903] env[69982]: _type = "Task" [ 1316.523903] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.533290] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865719, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.034481] env[69982]: DEBUG oslo_vmware.api [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865719, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143204} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.034884] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1317.034928] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1317.035081] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1317.059213] env[69982]: INFO nova.scheduler.client.report [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted allocations for instance 6c20ba60-6552-47f3-8eb3-a71923a0a68f [ 1317.564114] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.564472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.564763] env[69982]: DEBUG nova.objects.instance [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'resources' on Instance uuid 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.068120] env[69982]: DEBUG nova.objects.instance [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'numa_topology' on Instance uuid 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.349310] env[69982]: DEBUG nova.compute.manager [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] 
[instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1318.349310] env[69982]: DEBUG nova.compute.manager [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing instance network info cache due to event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1318.349437] env[69982]: DEBUG oslo_concurrency.lockutils [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.349582] env[69982]: DEBUG oslo_concurrency.lockutils [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1318.349745] env[69982]: DEBUG nova.network.neutron [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1318.426539] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.426864] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.427116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.427345] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1318.427564] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e 
tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.429653] env[69982]: INFO nova.compute.manager [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Terminating instance [ 1318.570898] env[69982]: DEBUG nova.objects.base [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Object Instance<6c20ba60-6552-47f3-8eb3-a71923a0a68f> lazy-loaded attributes: resources,numa_topology {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1318.651843] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c50fc0-c529-459c-9c70-379fc05df1db {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.660126] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ba10d6-ff17-4002-bfa6-adc0f88649c2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.689655] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4491279-2441-44ed-8dcc-ce9ecf7cc200 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.697500] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fa7eab-281a-4122-8fac-1dd0b699a4fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.711485] env[69982]: DEBUG nova.compute.provider_tree [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.933134] env[69982]: DEBUG nova.compute.manager [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1318.933547] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1318.934618] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8187c867-80ad-43e6-8355-416b6948319f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.943832] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1318.944116] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbdc388f-bdbb-4c43-ae36-1152ba428648 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.953640] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1318.953640] env[69982]: value = "task-3865720" [ 1318.953640] env[69982]: _type = "Task" [ 1318.953640] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.962313] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.079104] env[69982]: DEBUG nova.network.neutron [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updated VIF entry in instance network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1319.079575] env[69982]: DEBUG nova.network.neutron [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd95c9544-b6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1319.214578] env[69982]: DEBUG nova.scheduler.client.report [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.333458] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1319.465215] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865720, 'name': PowerOffVM_Task, 'duration_secs': 0.200533} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.465543] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1319.465770] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1319.466085] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd149028-7165-494f-a81c-3150e73898a3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.530577] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1319.530800] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1319.530988] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleting the datastore file [datastore1] ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1319.531286] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a49b5c46-b868-46ea-92ac-3fdc5b75ca01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.538771] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1319.538771] env[69982]: value = "task-3865722" [ 1319.538771] env[69982]: _type = "Task" [ 1319.538771] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.547197] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865722, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.583117] env[69982]: DEBUG oslo_concurrency.lockutils [req-2a3748fa-c623-48b4-b496-ecdaada2eb4a req-b99ceb74-be09-42d7-8618-ad1a279a3eb4 service nova] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1319.719799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.048927] env[69982]: DEBUG oslo_vmware.api [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173367} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.049235] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1320.049430] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1320.049601] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1320.049825] env[69982]: INFO nova.compute.manager [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1320.050149] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1320.050378] env[69982]: DEBUG nova.compute.manager [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1320.050476] env[69982]: DEBUG nova.network.neutron [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1320.227800] env[69982]: DEBUG oslo_concurrency.lockutils [None req-fbab46c7-5eb3-42da-b96b-a63e72ff8c29 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.986s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.228688] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.895s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.228877] env[69982]: INFO nova.compute.manager [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Unshelving [ 1320.506156] env[69982]: DEBUG nova.compute.manager [req-b16b5bb7-afbc-454c-98e6-48982645e7f9 req-1ce5c445-bdae-4631-92b2-1700b18e3dd4 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Received event network-vif-deleted-56cebe1d-243c-4f51-b0a0-200e18141707 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1320.506156] env[69982]: INFO nova.compute.manager [req-b16b5bb7-afbc-454c-98e6-48982645e7f9 req-1ce5c445-bdae-4631-92b2-1700b18e3dd4 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Neutron deleted interface 56cebe1d-243c-4f51-b0a0-200e18141707; detaching it from the instance and deleting it from the info cache [ 1320.506322] env[69982]: DEBUG nova.network.neutron [req-b16b5bb7-afbc-454c-98e6-48982645e7f9 req-1ce5c445-bdae-4631-92b2-1700b18e3dd4 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.983460] env[69982]: DEBUG nova.network.neutron [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1321.008981] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cfb2354-23ee-44fa-bfcc-d7ef66d63321 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.019096] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef543893-2ef7-4815-8e99-ce2bf238925d {{(pid=69982) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.047490] env[69982]: DEBUG nova.compute.manager [req-b16b5bb7-afbc-454c-98e6-48982645e7f9 req-1ce5c445-bdae-4631-92b2-1700b18e3dd4 service nova] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Detach interface failed, port_id=56cebe1d-243c-4f51-b0a0-200e18141707, reason: Instance ce24e165-230a-44bb-ae46-d1479e71585a could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1321.254515] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.254849] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.255076] env[69982]: DEBUG nova.objects.instance [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'pci_requests' on Instance uuid 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.486622] env[69982]: INFO nova.compute.manager [-] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Took 1.44 seconds to deallocate network for instance. 
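The repeated "Waiting for the task: (returnval){ value = "task-..." }" and "Task: {...} progress is N%" records above (PowerOffVM_Task, DeleteDatastoreFile_Task) come from oslo.vmware's wait_for_task/_poll_task helpers, which poll the vCenter task object until it reports success or failure. The following is a minimal, self-contained sketch of that polling pattern only; TaskInfo and get_task_info are illustrative stand-ins and do not reproduce oslo.vmware's actual API.

    # Sketch of the "wait for a vCenter task" pattern suggested by the
    # wait_for_task/_poll_task records above. TaskInfo and get_task_info are
    # illustrative stand-ins; oslo.vmware's real API differs in detail.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str
        name: str
        state: str          # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0   # percent complete
        error: str = ""

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                return info
            if info.state == "error":
                raise RuntimeError(f"{info.name} ({task_id}) failed: {info.error}")
            if time.monotonic() > deadline:
                raise TimeoutError(f"{info.name} ({task_id}) did not complete in time")
            # Mirrors the periodic "progress is N%." DEBUG lines in the log.
            print(f"Task: {{'id': {task_id!r}, 'name': {info.name!r}}} "
                  f"progress is {info.progress}%.")
            time.sleep(poll_interval)

    if __name__ == "__main__":
        # Fake task source that completes after three polls.
        states = iter([("running", 0), ("running", 50), ("success", 100)])
        def fake_get_task_info(task_id):
            state, progress = next(states)
            return TaskInfo(task_id, "PowerOffVM_Task", state, progress)
        wait_for_task(fake_get_task_info, "task-3865720", poll_interval=0.01)

The key design point visible in the log is that the caller blocks on the task handle and logs progress at each poll, so a slow datastore delete or power-off shows up as a series of "progress is N%." lines rather than a silent hang.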
[ 1321.760115] env[69982]: DEBUG nova.objects.instance [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'numa_topology' on Instance uuid 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.992832] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.262711] env[69982]: INFO nova.compute.claims [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1323.163090] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1323.163358] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1323.163688] env[69982]: DEBUG nova.objects.instance [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid e778d67f-13de-4a50-9c46-137bdbfd4ddf {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.365037] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2ff472-631e-42e8-8329-d4fcd22ff4ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.374952] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c78642-82c9-47ad-b1fa-b0eb81d0bd46 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.404518] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebfa60f-2911-4deb-a07a-499ceaf99af7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.412285] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66174cb-b001-45b6-816c-4a922a1aa338 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1323.425966] env[69982]: DEBUG nova.compute.provider_tree [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1323.757622] env[69982]: DEBUG nova.objects.instance [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid e778d67f-13de-4a50-9c46-137bdbfd4ddf {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1323.929041] env[69982]: DEBUG nova.scheduler.client.report [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1324.260554] env[69982]: DEBUG nova.objects.base [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1324.260773] env[69982]: DEBUG nova.network.neutron [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1324.323518] env[69982]: DEBUG nova.policy [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1324.433901] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.179s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1324.436458] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 
tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.444s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1324.436731] env[69982]: DEBUG nova.objects.instance [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'resources' on Instance uuid ce24e165-230a-44bb-ae46-d1479e71585a {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1324.470400] env[69982]: INFO nova.network.neutron [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating port d95c9544-b6cb-4f15-adb5-ef91c7ef325d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1325.023028] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaa0257-e0a0-4425-ba5a-217e058a51ba {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.033011] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c8a8f1-1695-416b-a30e-ab01e60d9c15 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.064213] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13b4b86-73c1-4708-b4d3-6351ec408b3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.073162] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332095ff-e4b0-4dd8-80f5-28d8cc1d489c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.088134] env[69982]: DEBUG nova.compute.provider_tree [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1325.591348] env[69982]: DEBUG nova.scheduler.client.report [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1325.697288] env[69982]: DEBUG nova.compute.manager [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 1325.697511] env[69982]: DEBUG oslo_concurrency.lockutils [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.697710] env[69982]: DEBUG oslo_concurrency.lockutils [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.697874] env[69982]: DEBUG oslo_concurrency.lockutils [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.698098] env[69982]: DEBUG nova.compute.manager [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] No waiting events found dispatching network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1325.698284] env[69982]: WARNING nova.compute.manager [req-385e53b3-7315-4628-9840-6f2fb8980b4f req-eb1ba1a2-86ee-4972-bd0a-1c9f028c356e service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received unexpected event network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 for instance with vm_state active and task_state None. 
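The "No waiting events found dispatching network-vif-plugged-..." and "Received unexpected event ... for instance with vm_state active and task_state None" records above reflect the compute manager's external-event handling: an operation that expects a Neutron notification registers a waiter keyed by instance and event name, and an incoming event either wakes that waiter or is logged as unexpected. The sketch below is a simplified, threading-based illustration of that registry idea under those assumptions; it is not Nova's actual implementation.

    # Simplified sketch of the "waiting events" registry implied by the
    # pop_instance_event / "No waiting events found dispatching ..." records.
    # Illustration only, built on threading primitives, not Nova's code.
    import threading
    from collections import defaultdict

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            # {instance_uuid: {event_name: threading.Event}}
            self._waiters = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, event_name):
            """Called by an operation (e.g. attach_interface) before it waits."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            """Called when an external event arrives; returns the waiter or None."""
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the WARNING "Received unexpected event ..." above.
            print(f"unexpected event {event_name} for instance {instance_uuid}")
        else:
            waiter.set()

    if __name__ == "__main__":
        events = InstanceEvents()
        uuid = "e778d67f-13de-4a50-9c46-137bdbfd4ddf"
        w = events.prepare_for_event(uuid, "network-vif-plugged-b2ab07e2")
        handle_external_event(events, uuid, "network-vif-plugged-b2ab07e2")
        assert w.wait(timeout=1)  # waiter was woken
        handle_external_event(events, uuid, "network-changed-b2ab07e2")  # unexpected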
[ 1325.782744] env[69982]: DEBUG nova.network.neutron [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Successfully updated port: b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.876310] env[69982]: DEBUG nova.compute.manager [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1325.876581] env[69982]: DEBUG oslo_concurrency.lockutils [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1325.876809] env[69982]: DEBUG oslo_concurrency.lockutils [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1325.877089] env[69982]: DEBUG oslo_concurrency.lockutils [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1325.877370] env[69982]: DEBUG nova.compute.manager [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] No waiting events found dispatching network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1325.877482] env[69982]: WARNING nova.compute.manager [req-c730a082-4d82-43f0-91e1-28ec12d9f8ba req-af34e660-8ddb-4183-94eb-bcd96ecb824e service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received unexpected event network-vif-plugged-d95c9544-b6cb-4f15-adb5-ef91c7ef325d for instance with vm_state shelved_offloaded and task_state spawning. 
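The constant "Acquiring lock X by Y", "Lock X acquired ... waited Ns", and "Lock X released ... held Ns" records throughout this trace are oslo.concurrency's lockutils instrumentation around named locks; each critical section in the log (do_terminate_instance, instance_claim on "compute_resources", "refresh_cache-<uuid>") is serialized on a process-wide lock keyed by name. Below is a minimal sketch of that pattern, assuming oslo.concurrency is installed and using its lockutils.lock() context manager; the wait/held printouts here are illustrative stand-ins for the messages lockutils itself emits.

    # Sketch of the named-lock pattern behind the "Acquiring lock ..." /
    # "acquired ... waited Ns" / "released ... held Ns" records, assuming
    # oslo.concurrency's lockutils.lock() context manager. The prints are
    # illustrative; the real messages come from lockutils' own wrapper.
    import time
    from contextlib import contextmanager

    from oslo_concurrency import lockutils

    @contextmanager
    def traced_lock(name, by):
        print(f'Acquiring lock "{name}" by "{by}"')
        start = time.monotonic()
        with lockutils.lock(name):
            waited = time.monotonic() - start
            print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
            held_from = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - held_from
                print(f'Lock "{name}" released by "{by}" :: held {held:.3f}s')

    def refresh_network_cache(instance_uuid):
        # Serialize cache refreshes per instance, like the
        # "refresh_cache-<uuid>" locks in the log above and below.
        with traced_lock(f"refresh_cache-{instance_uuid}",
                         "refresh_network_cache"):
            pass  # ... rebuild and store the instance's network info cache ...

    if __name__ == "__main__":
        refresh_network_cache("6c20ba60-6552-47f3-8eb3-a71923a0a68f")

Because the lock is keyed by name, concurrent requests for different instances (different "refresh_cache-<uuid>" names) proceed in parallel, while the shared "compute_resources" name forces resource-tracker claims and usage updates to queue, which is why the log shows waits of several seconds on that lock.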
[ 1325.965252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.965553] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1325.965711] env[69982]: DEBUG nova.network.neutron [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1326.096453] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.660s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.116986] env[69982]: INFO nova.scheduler.client.report [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleted allocations for instance ce24e165-230a-44bb-ae46-d1479e71585a [ 1326.285020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.285259] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1326.285486] env[69982]: DEBUG nova.network.neutron [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1326.626692] env[69982]: DEBUG oslo_concurrency.lockutils [None req-aae4a238-8837-4eae-8965-624b21adb03e tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "ce24e165-230a-44bb-ae46-d1479e71585a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.200s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1326.694309] env[69982]: DEBUG nova.network.neutron [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 
tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1326.828795] env[69982]: WARNING nova.network.neutron [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. 
ignoring it [ 1327.104697] env[69982]: DEBUG nova.network.neutron [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "address": "fa:16:3e:38:0c:1a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ab07e2-91", "ovs_interfaceid": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.197179] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.225216] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='04137a49d1f741c379ecf89a06f83d03',container_format='bare',created_at=2025-05-07T07:16:10Z,direct_url=,disk_format='vmdk',id=a22d96e8-6538-4d03-8eea-5cd269aeb9d8,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-675813854-shelved',owner='820dcd8333cb4a678ef562e4150518d2',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-05-07T07:16:25Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1327.225733] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.225733] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1327.225940] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.225979] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1327.226132] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1327.226382] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1327.226569] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1327.226755] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1327.226923] env[69982]: DEBUG 
nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1327.227110] env[69982]: DEBUG nova.virt.hardware [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1327.228016] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdd4c5f-f769-49b9-bb74-6c4f55d6ba4e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.237699] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f210b8d6-17db-4ca9-889a-69e80ba28002 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.253698] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:ed:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '450939f7-f74b-41f7-93f7-b4fde6a6fbed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd95c9544-b6cb-4f15-adb5-ef91c7ef325d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1327.261557] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1327.261857] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1327.262095] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-210520a6-5dbf-4bcf-9dda-8244c2be081d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.281704] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1327.281704] env[69982]: value = "task-3865723" [ 1327.281704] env[69982]: _type = "Task" [ 1327.281704] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.290180] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865723, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.608306] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1327.609016] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.609195] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.610061] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f712412b-6624-405d-a1c6-3f1a744536b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.629359] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1327.629821] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.629821] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1327.630031] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.630143] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1327.630298] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1327.630688] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1327.630688] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1327.630839] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1327.630959] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1327.631154] env[69982]: DEBUG nova.virt.hardware [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1327.637832] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfiguring VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1327.638547] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3ecbde0-517b-4d7e-a33f-4b7cc7f7414b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.657040] env[69982]: DEBUG oslo_vmware.api [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1327.657040] env[69982]: value = "task-3865724" [ 1327.657040] env[69982]: _type = "Task" [ 1327.657040] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.665548] env[69982]: DEBUG oslo_vmware.api [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865724, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.723320] env[69982]: DEBUG nova.compute.manager [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1327.723320] env[69982]: DEBUG nova.compute.manager [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-b2ab07e2-9194-42f9-96cd-d32a4aceab17. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1327.723452] env[69982]: DEBUG oslo_concurrency.lockutils [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.723487] env[69982]: DEBUG oslo_concurrency.lockutils [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.723632] env[69982]: DEBUG nova.network.neutron [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1327.795288] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865723, 'name': CreateVM_Task, 'duration_secs': 0.303073} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.795522] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1327.796186] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.796357] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1327.796881] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1327.797202] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-830bff47-05b7-45da-b8f4-17e340d3b383 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.802402] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1327.802402] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238a87c-fbdb-f5bb-318f-f0ec5dbff0cc" [ 1327.802402] env[69982]: _type = "Task" [ 1327.802402] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.811106] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5238a87c-fbdb-f5bb-318f-f0ec5dbff0cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.042783] env[69982]: DEBUG nova.compute.manager [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1328.042849] env[69982]: DEBUG nova.compute.manager [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing instance network info cache due to event network-changed-d95c9544-b6cb-4f15-adb5-ef91c7ef325d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1328.043184] env[69982]: DEBUG oslo_concurrency.lockutils [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] Acquiring lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.043385] env[69982]: DEBUG oslo_concurrency.lockutils [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] Acquired lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.043554] env[69982]: DEBUG nova.network.neutron [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Refreshing network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1328.170509] env[69982]: DEBUG oslo_vmware.api [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865724, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.314447] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1328.315470] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Processing image a22d96e8-6538-4d03-8eea-5cd269aeb9d8 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1328.315470] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.315470] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1328.315470] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1328.315828] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74da4223-44e9-4501-9435-c73a7d3e5f1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.326151] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1328.326358] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1328.327138] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c477170-7731-4c86-8764-61a6d452a3e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.333058] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1328.333058] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52790c80-065c-2bae-dc06-41d0bab9cbee" [ 1328.333058] env[69982]: _type = "Task" [ 1328.333058] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.343718] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52790c80-065c-2bae-dc06-41d0bab9cbee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.471511] env[69982]: DEBUG nova.network.neutron [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port b2ab07e2-9194-42f9-96cd-d32a4aceab17. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1328.472077] env[69982]: DEBUG nova.network.neutron [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "address": "fa:16:3e:38:0c:1a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ab07e2-91", "ovs_interfaceid": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.627157] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1328.627410] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock 
"8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1328.668209] env[69982]: DEBUG oslo_vmware.api [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865724, 'name': ReconfigVM_Task, 'duration_secs': 0.646815} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.670708] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1328.670937] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfigured VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1328.754849] env[69982]: DEBUG nova.network.neutron [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updated VIF entry in instance network info cache for port d95c9544-b6cb-4f15-adb5-ef91c7ef325d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1328.755246] env[69982]: DEBUG nova.network.neutron [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [{"id": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "address": "fa:16:3e:f9:ed:3b", "network": {"id": "d9047f76-b8eb-4e71-916b-1c691d42b8b5", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1045037563-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "820dcd8333cb4a678ef562e4150518d2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "450939f7-f74b-41f7-93f7-b4fde6a6fbed", "external-id": "nsx-vlan-transportzone-866", "segmentation_id": 866, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd95c9544-b6", "ovs_interfaceid": "d95c9544-b6cb-4f15-adb5-ef91c7ef325d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.844304] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 
tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1328.844582] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Fetch image to [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c/OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1328.844775] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Downloading stream optimized image a22d96e8-6538-4d03-8eea-5cd269aeb9d8 to [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c/OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c.vmdk on the data store datastore1 as vApp {{(pid=69982) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1328.844942] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Downloading image file data a22d96e8-6538-4d03-8eea-5cd269aeb9d8 to the ESX as VM named 'OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c' {{(pid=69982) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1328.928709] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1328.928709] env[69982]: value = "resgroup-9" [ 1328.928709] env[69982]: _type = "ResourcePool" [ 1328.928709] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1328.929408] env[69982]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-960db175-1be4-4660-80e7-76f74673fb6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.951074] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease: (returnval){ [ 1328.951074] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bf5b5-558d-7ee4-5389-e5b21ee22b54" [ 1328.951074] env[69982]: _type = "HttpNfcLease" [ 1328.951074] env[69982]: } obtained for vApp import into resource pool (val){ [ 1328.951074] env[69982]: value = "resgroup-9" [ 1328.951074] env[69982]: _type = "ResourcePool" [ 1328.951074] env[69982]: }. 
{{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1328.951500] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the lease: (returnval){ [ 1328.951500] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bf5b5-558d-7ee4-5389-e5b21ee22b54" [ 1328.951500] env[69982]: _type = "HttpNfcLease" [ 1328.951500] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1328.957899] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1328.957899] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bf5b5-558d-7ee4-5389-e5b21ee22b54" [ 1328.957899] env[69982]: _type = "HttpNfcLease" [ 1328.957899] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1328.975772] env[69982]: DEBUG oslo_concurrency.lockutils [req-727da1de-a10d-4bc5-8ddb-32aadff7c410 req-12f5a539-f87b-4e09-a49a-c897e3e03f35 service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.130473] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1329.175275] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ecbe7892-341b-4274-9453-02120a4953ec tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.012s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1329.258370] env[69982]: DEBUG oslo_concurrency.lockutils [req-333da1b9-fb0f-4448-86ab-6a0588611cdc req-696de46e-4d50-4fec-b4b3-2da8bbdab102 service nova] Releasing lock "refresh_cache-6c20ba60-6552-47f3-8eb3-a71923a0a68f" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1329.460779] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1329.460779] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bf5b5-558d-7ee4-5389-e5b21ee22b54" [ 1329.460779] env[69982]: _type = "HttpNfcLease" [ 1329.460779] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1329.461177] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1329.461177] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525bf5b5-558d-7ee4-5389-e5b21ee22b54" [ 1329.461177] env[69982]: _type = "HttpNfcLease" [ 1329.461177] env[69982]: }. 
{{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1329.461841] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf418ae-7a1b-41cb-a0ed-07d4c274792c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.469631] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1329.469808] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk. {{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1329.531834] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-481523bb-00cf-454c-9881-70555211613d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.653937] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.654240] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.655958] env[69982]: INFO nova.compute.claims [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1329.757629] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1329.758047] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 
0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.262625] env[69982]: INFO nova.compute.manager [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Detaching volume faf3518e-f73e-4b06-a226-42e4c81f1235 [ 1330.311160] env[69982]: INFO nova.virt.block_device [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Attempting to driver detach volume faf3518e-f73e-4b06-a226-42e4c81f1235 from mountpoint /dev/sdb [ 1330.311656] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1330.312157] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768114', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'name': 'volume-faf3518e-f73e-4b06-a226-42e4c81f1235', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f', 'attached_at': '', 'detached_at': '', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'serial': 'faf3518e-f73e-4b06-a226-42e4c81f1235'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1330.313825] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44fcd9e-ddd1-4b63-a700-67d0a1ae3cd1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.346543] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e506e7c-6a3c-4e55-b388-895957c8dd47 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.354285] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7553b38f-5262-4b2b-b7ae-49feae7e3594 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.377509] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e506e79c-8572-47b8-a829-7cf06f0c7988 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.394520] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] The volume has not been displaced from its original location: [datastore2] volume-faf3518e-f73e-4b06-a226-42e4c81f1235/volume-faf3518e-f73e-4b06-a226-42e4c81f1235.vmdk. 
No consolidation needed. {{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1330.400488] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfiguring VM instance instance-00000072 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1330.403963] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fefe43ec-64e9-4827-85b9-700d9573c2ce {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.430849] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1330.430849] env[69982]: value = "task-3865726" [ 1330.430849] env[69982]: _type = "Task" [ 1330.430849] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.442808] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865726, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.733960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.734264] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.734447] env[69982]: DEBUG nova.compute.manager [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1330.735766] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0439286e-e4f1-44d3-86c6-d431565c509c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.740163] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Completed reading data from the image iterator. 
{{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1330.740357] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1330.741262] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e111ed0-b000-4a1f-abca-50ede490656e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.751426] env[69982]: DEBUG nova.compute.manager [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69982) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1330.752014] env[69982]: DEBUG nova.objects.instance [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'flavor' on Instance uuid 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1330.753375] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1330.753539] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk. 
{{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1330.753987] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-c845f5aa-0c84-4464-986b-28a5dea0defa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.772748] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b37699-5b00-404f-bf9b-739742393774 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.781374] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2597aae-2cb5-4ea7-8c78-e7409874fff9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.815691] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5593b47-5900-4978-ab1f-284cdc62df75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.823957] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ead9c8-0975-43e9-bc85-f0e1ea8c22b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.838023] env[69982]: DEBUG nova.compute.provider_tree [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1330.927514] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.927871] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.945491] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865726, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.947100] env[69982]: DEBUG oslo_vmware.rw_handles [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52843097-39c9-1127-3865-f3095fb06aa8/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1330.947306] env[69982]: INFO nova.virt.vmwareapi.images [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Downloaded image file data a22d96e8-6538-4d03-8eea-5cd269aeb9d8 [ 1330.948120] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e64023d-b195-4587-9489-d9fab030ede5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.964958] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05523a95-a442-4fdd-9084-91415cd0a776 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.988609] env[69982]: INFO nova.virt.vmwareapi.images [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] The imported VM was unregistered [ 1330.991046] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1330.991295] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Creating directory with path [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.991570] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba058d17-bde1-454d-a245-d83f333cecff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.012136] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Created directory with path [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.012345] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c/OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c.vmdk to [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk. 
{{(pid=69982) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1331.012612] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-97a6d0a8-8103-480b-ba16-790c547573ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.021068] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1331.021068] env[69982]: value = "task-3865728" [ 1331.021068] env[69982]: _type = "Task" [ 1331.021068] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.029514] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.341440] env[69982]: DEBUG nova.scheduler.client.report [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1331.430573] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.430775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1331.431719] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc99d032-996b-4287-b092-497bfb2356cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.443902] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865726, 'name': ReconfigVM_Task, 'duration_secs': 0.57281} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.457367] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Reconfigured VM instance instance-00000072 to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1331.462298] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e4d5667-3481-40e4-9167-6674488954a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.472980] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23a98ae-1d54-4abc-be08-56646bc58ad0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.501331] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfiguring VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1331.503099] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f267544-0aac-401a-ae13-291bc21abf3d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.517081] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1331.517081] env[69982]: value = "task-3865729" [ 1331.517081] env[69982]: _type = "Task" [ 1331.517081] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.529432] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1331.529432] env[69982]: value = "task-3865730" [ 1331.529432] env[69982]: _type = "Task" [ 1331.529432] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.529782] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865729, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.537110] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.540489] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.760193] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.760535] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fde6044-dfff-44d9-828d-03b40327bd3b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.768949] env[69982]: DEBUG oslo_vmware.api [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1331.768949] env[69982]: value = "task-3865731" [ 1331.768949] env[69982]: _type = "Task" [ 1331.768949] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.778702] env[69982]: DEBUG oslo_vmware.api [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865731, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.848103] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1331.848742] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1332.029817] env[69982]: DEBUG oslo_vmware.api [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865729, 'name': ReconfigVM_Task, 'duration_secs': 0.20243} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.030609] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768114', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'name': 'volume-faf3518e-f73e-4b06-a226-42e4c81f1235', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f', 'attached_at': '', 'detached_at': '', 'volume_id': 'faf3518e-f73e-4b06-a226-42e4c81f1235', 'serial': 'faf3518e-f73e-4b06-a226-42e4c81f1235'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1332.039570] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.045329] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.279077] env[69982]: DEBUG oslo_vmware.api [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865731, 'name': PowerOffVM_Task, 'duration_secs': 0.231475} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.279350] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.279583] env[69982]: DEBUG nova.compute.manager [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1332.280453] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b097a028-1e3c-49e5-9182-1f001fdab325 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.354316] env[69982]: DEBUG nova.compute.utils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1332.355816] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Allocating IP information in the background. {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1332.355986] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1332.397323] env[69982]: DEBUG nova.policy [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'db5e62cfeadb4f8290fea53c93fd0189', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd6d2d65079fb46d8a9b1a31d2eab9829', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1332.532260] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.542448] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.582786] env[69982]: DEBUG nova.objects.instance [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'flavor' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.736555] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Successfully created port: 6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1332.796914] env[69982]: DEBUG oslo_concurrency.lockutils [None req-c08b2617-7903-422d-b546-7dd233972545 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.860755] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Start building block device mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1333.035203] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.045723] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.227185] env[69982]: DEBUG nova.objects.instance [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'flavor' on Instance uuid 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.534107] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865728, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.376731} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.534258] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c/OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c.vmdk to [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk. [ 1333.534457] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Cleaning up location [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1333.534622] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b3924acb-7b44-4c67-85f3-0d896f908e7c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1333.534895] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7902dbd8-31de-42c6-9771-c3d242aeaa6a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.546836] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.548348] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1333.548348] env[69982]: value = "task-3865732" [ 1333.548348] env[69982]: _type = "Task" [ 1333.548348] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.556370] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865732, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.590482] env[69982]: DEBUG oslo_concurrency.lockutils [None req-9a34a873-7010-4763-803f-c7d757c57453 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.832s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1333.732639] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.732819] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1333.732985] env[69982]: DEBUG nova.network.neutron [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1333.733182] env[69982]: DEBUG nova.objects.instance [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'info_cache' on Instance uuid 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1333.872439] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1333.901109] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1333.901479] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1333.901585] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1333.901702] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1333.901846] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1333.901991] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1333.902220] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1333.902379] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1333.903015] env[69982]: DEBUG nova.virt.hardware [None 
req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1333.903015] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1333.903015] env[69982]: DEBUG nova.virt.hardware [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1333.904012] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19918d9b-6797-4545-bd59-c460b644d8c6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.913522] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d5eab9-b778-4bee-92e6-0e13cc880560 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.046740] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.058008] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039626} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.058286] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1334.058454] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1334.058713] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk to [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1334.059020] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b982fce3-f71c-474e-a8cc-81cb6ba6a5ca {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.066613] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1334.066613] env[69982]: value = "task-3865733" [ 1334.066613] env[69982]: _type = "Task" [ 1334.066613] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.074696] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.110896] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.111144] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.111383] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.112175] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.112372] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.115134] env[69982]: INFO nova.compute.manager [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Terminating instance [ 1334.205681] env[69982]: DEBUG nova.compute.manager [req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Received event network-vif-plugged-6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1334.205904] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.206122] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.206292] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1334.206490] env[69982]: DEBUG nova.compute.manager [req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] No waiting events found dispatching network-vif-plugged-6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1334.206682] env[69982]: WARNING nova.compute.manager [req-2c30c717-60d5-437a-af03-5e2f86e4d600 req-f1907e32-c790-4255-a16b-a33bae6bd425 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Received unexpected event network-vif-plugged-6c285c0e-e718-4241-ac88-8de36e44a62f for instance with vm_state building and task_state spawning. [ 1334.236716] env[69982]: DEBUG nova.objects.base [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Object Instance<2cbd6771-48dd-44bc-a0e3-96dad0a7aead> lazy-loaded attributes: flavor,info_cache {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1334.548266] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.579145] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.580312] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Successfully updated port: 6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1334.619769] env[69982]: DEBUG nova.compute.manager [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1334.619769] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1334.621203] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf8cd76-734d-4e35-8ddd-3572957f6af7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.631113] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1334.631438] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7b699267-0452-40cf-b57c-c04f55fb00c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.640214] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1334.640214] env[69982]: value = "task-3865734" [ 1334.640214] env[69982]: _type = "Task" [ 1334.640214] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.653601] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865734, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.978043] env[69982]: DEBUG nova.network.neutron [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.049195] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.077668] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.083472] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.083630] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1335.083804] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1335.150150] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865734, 'name': PowerOffVM_Task, 'duration_secs': 0.225038} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.150416] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1335.150587] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1335.150864] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f201b7ba-40ec-4129-83d5-95bf57176af4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.218944] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1335.218944] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1335.219263] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 
tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore2] 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1335.219570] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41c4474d-6ecc-4329-a933-2bd631c363e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.228132] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1335.228132] env[69982]: value = "task-3865736" [ 1335.228132] env[69982]: _type = "Task" [ 1335.228132] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.237659] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.480736] env[69982]: DEBUG oslo_concurrency.lockutils [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1335.550604] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.578927] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.628277] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1335.739171] env[69982]: DEBUG oslo_vmware.api [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24404} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.739459] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.739647] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.739823] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.739999] env[69982]: INFO nova.compute.manager [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1335.740319] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1335.743164] env[69982]: DEBUG nova.compute.manager [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1335.743283] env[69982]: DEBUG nova.network.neutron [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1335.801021] env[69982]: DEBUG nova.network.neutron [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating instance_info_cache with network_info: [{"id": "6c285c0e-e718-4241-ac88-8de36e44a62f", "address": "fa:16:3e:da:4e:83", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c285c0e-e7", "ovs_interfaceid": "6c285c0e-e718-4241-ac88-8de36e44a62f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.050849] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.078695] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.304669] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1336.304826] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Instance network_info: |[{"id": "6c285c0e-e718-4241-ac88-8de36e44a62f", "address": "fa:16:3e:da:4e:83", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c285c0e-e7", "ovs_interfaceid": "6c285c0e-e718-4241-ac88-8de36e44a62f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1336.305508] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:4e:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c285c0e-e718-4241-ac88-8de36e44a62f', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1336.314731] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1336.315931] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1336.316256] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7264728d-cbf8-4159-849d-25efe890782f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.335689] env[69982]: DEBUG nova.compute.manager [req-58ffc25a-fe12-4345-bbc7-1d31df73995f req-6dcf11ba-0561-4ea9-a155-0c8def48ba94 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Received event network-vif-deleted-f973e93a-f258-4ccd-a732-c323a3202bb3 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.336142] env[69982]: INFO nova.compute.manager [req-58ffc25a-fe12-4345-bbc7-1d31df73995f req-6dcf11ba-0561-4ea9-a155-0c8def48ba94 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Neutron deleted interface f973e93a-f258-4ccd-a732-c323a3202bb3; detaching it from the instance and deleting it from the info cache [ 1336.336490] env[69982]: DEBUG nova.network.neutron [req-58ffc25a-fe12-4345-bbc7-1d31df73995f req-6dcf11ba-0561-4ea9-a155-0c8def48ba94 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.344344] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1336.344344] env[69982]: value = "task-3865737" [ 1336.344344] env[69982]: _type = "Task" [ 1336.344344] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.356588] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865737, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.480438] env[69982]: DEBUG nova.compute.manager [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Received event network-changed-6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1336.480632] env[69982]: DEBUG nova.compute.manager [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Refreshing instance network info cache due to event network-changed-6c285c0e-e718-4241-ac88-8de36e44a62f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1336.480842] env[69982]: DEBUG oslo_concurrency.lockutils [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] Acquiring lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.480980] env[69982]: DEBUG oslo_concurrency.lockutils [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] Acquired lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.481223] env[69982]: DEBUG nova.network.neutron [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Refreshing network info cache for port 6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1336.487008] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1336.487340] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85029af4-5ff5-4e63-91a9-c7211fea75a0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.495490] env[69982]: DEBUG oslo_vmware.api [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1336.495490] env[69982]: value = "task-3865738" [ 1336.495490] env[69982]: _type = "Task" [ 1336.495490] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.504835] env[69982]: DEBUG oslo_vmware.api [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.552854] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.578890] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865733, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.33172} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.579235] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a22d96e8-6538-4d03-8eea-5cd269aeb9d8/a22d96e8-6538-4d03-8eea-5cd269aeb9d8.vmdk to [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1336.580113] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7a76ab-1b21-46c2-ad73-78b2e2ac063b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.602989] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1336.603361] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adb8d9c6-d7a7-4d92-a020-44e3121955a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.625782] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1336.625782] env[69982]: value = "task-3865739" [ 1336.625782] env[69982]: _type = "Task" [ 1336.625782] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.635910] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.806144] env[69982]: DEBUG nova.network.neutron [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.839730] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d87a805-bd19-47ca-92e1-df6223a41328 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.852587] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e458c9-3ea3-4ed9-bc38-a6d5fcda9d51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.867410] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865737, 'name': CreateVM_Task, 'duration_secs': 0.401395} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.868134] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1336.869075] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.869301] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1336.869691] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1336.869983] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca05ad80-875f-48b3-9d98-9346c194e76c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.875790] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1336.875790] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0b856-89cc-03c0-5e67-5ec9e3ce4446" [ 1336.875790] env[69982]: _type = "Task" [ 1336.875790] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.890218] env[69982]: DEBUG nova.compute.manager [req-58ffc25a-fe12-4345-bbc7-1d31df73995f req-6dcf11ba-0561-4ea9-a155-0c8def48ba94 service nova] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Detach interface failed, port_id=f973e93a-f258-4ccd-a732-c323a3202bb3, reason: Instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1336.894203] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0b856-89cc-03c0-5e67-5ec9e3ce4446, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.007814] env[69982]: DEBUG oslo_vmware.api [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865738, 'name': PowerOnVM_Task, 'duration_secs': 0.461885} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.008158] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1337.008412] env[69982]: DEBUG nova.compute.manager [None req-ebca8d8e-19a5-4d0a-857d-4ce7f0b07afc tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1337.009281] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88e6e60-59e2-4db7-a6e6-08448f2fcabc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.053054] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.138694] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.227176] env[69982]: DEBUG nova.network.neutron [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updated VIF entry in instance network info cache for port 6c285c0e-e718-4241-ac88-8de36e44a62f. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1337.227553] env[69982]: DEBUG nova.network.neutron [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating instance_info_cache with network_info: [{"id": "6c285c0e-e718-4241-ac88-8de36e44a62f", "address": "fa:16:3e:da:4e:83", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c285c0e-e7", "ovs_interfaceid": "6c285c0e-e718-4241-ac88-8de36e44a62f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1337.308575] env[69982]: INFO nova.compute.manager [-] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Took 1.57 seconds to deallocate network for instance. [ 1337.386115] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52c0b856-89cc-03c0-5e67-5ec9e3ce4446, 'name': SearchDatastore_Task, 'duration_secs': 0.012517} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.386427] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.386705] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.386960] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.387119] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1337.387301] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.387588] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f433a4b-df99-4edc-8f87-deb32763460d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.396456] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.396697] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.397475] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa561bf1-5a0d-4b59-84dd-5c86e4fb0f56 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.402828] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1337.402828] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524262a8-1953-0f42-644b-e69766721227" [ 1337.402828] env[69982]: _type = "Task" [ 1337.402828] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.410994] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524262a8-1953-0f42-644b-e69766721227, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.553471] env[69982]: DEBUG oslo_vmware.api [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865730, 'name': ReconfigVM_Task, 'duration_secs': 5.806731} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.553730] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.553935] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Reconfigured VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1337.635208] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865739, 'name': ReconfigVM_Task, 'duration_secs': 0.752741} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.635791] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f/6c20ba60-6552-47f3-8eb3-a71923a0a68f.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1337.636417] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6429d6f3-1f75-43e8-9303-9edb8248263c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.642762] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1337.642762] env[69982]: value = "task-3865740" [ 1337.642762] env[69982]: _type = "Task" [ 1337.642762] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.650716] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865740, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.730448] env[69982]: DEBUG oslo_concurrency.lockutils [req-401df62a-e8d4-4855-b191-4723c0cc49de req-5d9951c7-816b-430a-af83-e6b6eccace29 service nova] Releasing lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.816335] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1337.816777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.817035] env[69982]: DEBUG nova.objects.instance [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'resources' on Instance uuid 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.912424] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': 
session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524262a8-1953-0f42-644b-e69766721227, 'name': SearchDatastore_Task, 'duration_secs': 0.009874} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.913351] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da11212-3ab5-4057-9bf0-a53355570861 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.919555] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1337.919555] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f02b00-3f41-0e34-17ba-fdb38d93d695" [ 1337.919555] env[69982]: _type = "Task" [ 1337.919555] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.927413] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f02b00-3f41-0e34-17ba-fdb38d93d695, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.097550] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7546188e-f0f3-44dd-b8a8-0e0c5fbebd88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.104272] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Suspending the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1338.104545] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-eaa2bca7-5c7e-4d05-b595-ca517d3e5ec0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.110860] env[69982]: DEBUG oslo_vmware.api [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1338.110860] env[69982]: value = "task-3865741" [ 1338.110860] env[69982]: _type = "Task" [ 1338.110860] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.119975] env[69982]: DEBUG oslo_vmware.api [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865741, 'name': SuspendVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.153476] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865740, 'name': Rename_Task, 'duration_secs': 0.18344} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.153819] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1338.154109] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9522bff-2219-4f89-9748-716be41f1994 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.161396] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1338.161396] env[69982]: value = "task-3865742" [ 1338.161396] env[69982]: _type = "Task" [ 1338.161396] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.170144] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.432310] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52f02b00-3f41-0e34-17ba-fdb38d93d695, 'name': SearchDatastore_Task, 'duration_secs': 0.010109} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.433620] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1338.433944] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8f0dacd5-59ec-495e-bac7-df2b76883562/8f0dacd5-59ec-495e-bac7-df2b76883562.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.434795] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db878ea9-c128-455b-994e-a45f0eab2acd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.437877] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44902dcb-daef-4038-b111-ae0aabe717cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.447819] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b03584b-30d0-4d86-8b1c-ed0819adf562 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.451458] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1338.451458] env[69982]: value = "task-3865743" [ 1338.451458] env[69982]: _type = "Task" [ 1338.451458] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.481997] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c71ae2f-6312-43de-a0a3-f273fe9aec9c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.487828] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865743, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.493082] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1c2409-080c-4460-afb9-751fa9392f26 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.507486] env[69982]: DEBUG nova.compute.provider_tree [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.621408] env[69982]: DEBUG oslo_vmware.api [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865741, 'name': SuspendVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.671867] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865742, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.878747] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1338.879049] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1338.879278] env[69982]: DEBUG nova.network.neutron [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1338.963809] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470115} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.964111] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 8f0dacd5-59ec-495e-bac7-df2b76883562/8f0dacd5-59ec-495e-bac7-df2b76883562.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1338.964345] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1338.964606] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b53122e-3690-429e-ae51-8af7cf764c7f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.972313] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1338.972313] env[69982]: value = "task-3865744" [ 1338.972313] env[69982]: _type = "Task" [ 1338.972313] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.982650] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.010955] env[69982]: DEBUG nova.scheduler.client.report [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1339.121660] env[69982]: DEBUG oslo_vmware.api [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865741, 'name': SuspendVM_Task, 'duration_secs': 0.734043} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.122131] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Suspended the VM {{(pid=69982) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1339.122131] env[69982]: DEBUG nova.compute.manager [None req-7947f62d-a481-4c4c-93c2-0967d9951f64 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1339.122837] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9082ea1b-d0e0-4d79-ab6c-8543c5c1e4b9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.173309] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865742, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.486447] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066026} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.486900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.487838] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172f1578-6b1f-44d0-8e08-c9d91fce0d79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.512395] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 8f0dacd5-59ec-495e-bac7-df2b76883562/8f0dacd5-59ec-495e-bac7-df2b76883562.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.515436] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5aacf60a-c279-42fb-8d22-3ef614c279b3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.533281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.716s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.549845] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1339.549845] env[69982]: value = "task-3865745" [ 1339.549845] env[69982]: _type = "Task" [ 1339.549845] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.558680] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865745, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.624424] env[69982]: INFO nova.scheduler.client.report [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted allocations for instance 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f [ 1339.673444] env[69982]: DEBUG oslo_vmware.api [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865742, 'name': PowerOnVM_Task, 'duration_secs': 1.16544} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.673991] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1339.781347] env[69982]: DEBUG nova.compute.manager [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1339.782586] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa7ef09-a595-43c5-950a-6653374423a5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.058387] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865745, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.138550] env[69982]: DEBUG oslo_concurrency.lockutils [None req-15481276-04cc-4488-916b-5985ae098777 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.027s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.148488] env[69982]: DEBUG nova.compute.manager [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1340.148744] env[69982]: DEBUG nova.compute.manager [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing instance network info cache due to event network-changed-90606ccc-f773-4313-9003-0b3239a7ca18. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1340.148947] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] Acquiring lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.152556] env[69982]: INFO nova.network.neutron [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Port b2ab07e2-9194-42f9-96cd-d32a4aceab17 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
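[editor's note] The update_instance_cache_with_nw_info entries in this section (for example the one for port 6c285c0e-e718-4241-ac88-8de36e44a62f logged at 1337.227553) dump the full network_info structure that Nova caches per instance. The sketch below is illustrative only: the dictionary is abridged from that cache entry, the field names are copied verbatim from the log, and summarize_vifs is a hypothetical helper written for this note, not part of Nova.

network_info = [{
    "id": "6c285c0e-e718-4241-ac88-8de36e44a62f",
    "address": "fa:16:3e:da:4e:83",
    "devname": "tap6c285c0e-e7",
    "active": True,
    "network": {
        "label": "tempest-AttachVolumeTestJSON-2068936561-network",
        "meta": {"mtu": 8950},
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
}]

def summarize_vifs(vifs):
    # One line per VIF: device name, MAC, fixed IPs, MTU and admin state.
    lines = []
    for vif in vifs:
        net = vif["network"]
        ips = [ip["address"]
               for subnet in net["subnets"]
               for ip in subnet["ips"]]
        lines.append("%s mac=%s ips=%s mtu=%s active=%s" % (
            vif["devname"], vif["address"], ",".join(ips),
            net["meta"].get("mtu"), vif["active"]))
    return lines

for line in summarize_vifs(network_info):
    print(line)
# tap6c285c0e-e7 mac=fa:16:3e:da:4e:83 ips=192.168.128.4 mtu=8950 active=True

The cache-refresh records that follow (port 90606ccc-f773-4313-9003-0b3239a7ca18) carry the same structure, only with the AttachInterfaces test network's values. [end editor's note]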
[ 1340.153010] env[69982]: DEBUG nova.network.neutron [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.301974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-48b0ae31-500f-4352-9ebc-5eb0e25c4bf2 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.073s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1340.558591] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865745, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.656168] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1340.658253] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] Acquired lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1340.658462] env[69982]: DEBUG nova.network.neutron [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Refreshing network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1340.745615] env[69982]: INFO nova.compute.manager [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Resuming [ 1340.746275] env[69982]: DEBUG nova.objects.instance [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'flavor' on Instance uuid 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1340.919669] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.920037] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.920654] env[69982]: DEBUG nova.objects.instance [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'flavor' on Instance uuid 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.061383] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865745, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.161489] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f970fd40-b921-4749-9e09-ac091bd5dd20 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-e778d67f-13de-4a50-9c46-137bdbfd4ddf-b2ab07e2-9194-42f9-96cd-d32a4aceab17" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.233s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.431759] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.432020] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.432255] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.432444] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.432609] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.435094] env[69982]: INFO nova.compute.manager [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Terminating instance [ 1341.470539] env[69982]: DEBUG nova.network.neutron [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updated VIF entry in instance network info cache for port 90606ccc-f773-4313-9003-0b3239a7ca18. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1341.470891] env[69982]: DEBUG nova.network.neutron [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [{"id": "90606ccc-f773-4313-9003-0b3239a7ca18", "address": "fa:16:3e:53:cb:40", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90606ccc-f7", "ovs_interfaceid": "90606ccc-f773-4313-9003-0b3239a7ca18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.560975] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865745, 'name': ReconfigVM_Task, 'duration_secs': 1.576071} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.561551] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 8f0dacd5-59ec-495e-bac7-df2b76883562/8f0dacd5-59ec-495e-bac7-df2b76883562.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.562349] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-14530e75-0397-41ba-b116-84b8c6df3929 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.572017] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1341.572017] env[69982]: value = "task-3865746" [ 1341.572017] env[69982]: _type = "Task" [ 1341.572017] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.580761] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865746, 'name': Rename_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.583475] env[69982]: DEBUG nova.objects.instance [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'pci_requests' on Instance uuid 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1341.939580] env[69982]: DEBUG nova.compute.manager [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1341.940057] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1341.941098] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571e2903-02bd-470c-854a-7f1c29f6b6cc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.950104] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1341.950441] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea4266c1-e3ab-4c26-9ef2-c56a51081be6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.958147] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1341.958147] env[69982]: value = "task-3865747" [ 1341.958147] env[69982]: _type = "Task" [ 1341.958147] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.968299] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865747, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.974090] env[69982]: DEBUG oslo_concurrency.lockutils [req-2c3226ef-96ba-4a32-8c1c-bd1ad8955a8a req-988ecd1c-c97c-4abc-9b78-b5b6d752d20b service nova] Releasing lock "refresh_cache-e778d67f-13de-4a50-9c46-137bdbfd4ddf" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.080250] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865746, 'name': Rename_Task, 'duration_secs': 0.14907} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.080556] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1342.080959] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a008a9e3-ae00-4f3c-9c54-0a3384cfb2b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.087763] env[69982]: DEBUG nova.objects.base [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Object Instance<889ff9c8-08a2-4249-ae5b-bc94bc16dc8c> lazy-loaded attributes: flavor,pci_requests {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1342.087763] env[69982]: DEBUG nova.network.neutron [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1342.089995] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1342.089995] env[69982]: value = "task-3865748" [ 1342.089995] env[69982]: _type = "Task" [ 1342.089995] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.099520] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865748, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.174264] env[69982]: DEBUG nova.policy [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9a331c0503c841c29481e05f01e9129b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9f274f86bbe43d4b92ac058f100ba0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1342.257560] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.257813] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquired lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.258076] env[69982]: DEBUG nova.network.neutron [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1342.468892] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865747, 'name': PowerOffVM_Task, 'duration_secs': 0.197853} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.469240] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.469371] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1342.469633] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3f5022b-8500-413e-a17c-9f56ed02e159 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.501842] env[69982]: DEBUG nova.compute.manager [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1342.501933] env[69982]: DEBUG nova.compute.manager [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing instance network info cache due to event network-changed-93615399-3c22-4aa4-b3d0-f2c5cffc9368. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1342.502184] env[69982]: DEBUG oslo_concurrency.lockutils [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.502358] env[69982]: DEBUG oslo_concurrency.lockutils [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1342.502501] env[69982]: DEBUG nova.network.neutron [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1342.550569] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1342.550927] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1342.551143] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleting the datastore file [datastore1] 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1342.551457] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d49da12-ab07-4b0e-859b-aca9dff30327 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.559717] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for the task: (returnval){ [ 1342.559717] env[69982]: value = "task-3865750" [ 1342.559717] env[69982]: _type = "Task" [ 1342.559717] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.569094] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.603440] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1342.603673] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.605434] env[69982]: DEBUG oslo_vmware.api [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865748, 'name': PowerOnVM_Task, 'duration_secs': 0.471813} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.605957] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.606178] env[69982]: INFO nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Took 8.73 seconds to spawn the instance on the hypervisor. 
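The wait_for_task / _poll_task pairs above (task-3865747 PowerOffVM_Task, task-3865748 PowerOnVM_Task, task-3865750 DeleteDatastoreFile_Task) all follow one pattern: the driver submits an asynchronous vCenter task, and oslo.vmware then polls it until it reports success or an error. The sketch below illustrates that submit-and-poll loop only; invoke_vcenter_task and get_task_info are hypothetical stand-ins for the underlying SOAP calls, not oslo.vmware's real internals.

```python
import time

# Hypothetical stand-ins for the SOAP task calls seen in the log
# (PowerOffVM_Task, PowerOnVM_Task, FileManager.DeleteDatastoreFile_Task, ...).
def invoke_vcenter_task(name, **kwargs):
    """Submit an asynchronous vCenter task and return an id like 'task-3865750'."""
    raise NotImplementedError("illustrative stub")

def get_task_info(task_id):
    """Return the task's current state, e.g. {'state': 'running', 'progress': 14}."""
    raise NotImplementedError("illustrative stub")

def wait_for_task(task_id, poll_interval=0.5):
    """Poll until the task finishes, mirroring the 'progress is N%' and
    'completed successfully' lines that oslo_vmware.api emits."""
    while True:
        info = get_task_info(task_id)
        if info["state"] == "success":
            return info  # carries e.g. duration_secs and any return value
        if info["state"] == "error":
            raise RuntimeError(f"{task_id} failed: {info.get('error')}")
        # Still queued or running: report progress and poll again.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

In the log itself this role is played by oslo.vmware's wait_for_task driven by a looping call; task-3865750, for example, is created, polled at 0%, and completes after roughly 0.14 seconds.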
[ 1342.606360] env[69982]: DEBUG nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1342.607561] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67343b6c-8084-40e0-bc8e-ad8fe453be31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.964192] env[69982]: DEBUG nova.network.neutron [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [{"id": "dc946213-769e-4998-bc51-5344491a5d10", "address": "fa:16:3e:31:7b:65", "network": {"id": "c468ab94-e60e-4d1a-bf81-0ebb1f752288", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-177591604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "efe64e69253d49a6a1146f240506ce39", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27138a4c-60c9-45fb-bf37-4c2f765315a3", "external-id": "nsx-vlan-transportzone-736", "segmentation_id": 736, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc946213-76", "ovs_interfaceid": "dc946213-769e-4998-bc51-5344491a5d10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.069942] env[69982]: DEBUG oslo_vmware.api [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Task: {'id': task-3865750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140677} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.069942] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1343.070208] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1343.070290] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1343.070473] env[69982]: INFO nova.compute.manager [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1343.070705] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1343.070893] env[69982]: DEBUG nova.compute.manager [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1343.070988] env[69982]: DEBUG nova.network.neutron [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1343.105883] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1343.126064] env[69982]: INFO nova.compute.manager [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Took 13.49 seconds to build instance. [ 1343.229227] env[69982]: DEBUG nova.network.neutron [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updated VIF entry in instance network info cache for port 93615399-3c22-4aa4-b3d0-f2c5cffc9368. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1343.229635] env[69982]: DEBUG nova.network.neutron [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.467391] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Releasing lock "refresh_cache-2cbd6771-48dd-44bc-a0e3-96dad0a7aead" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.468407] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb656f2-ceb4-4586-9269-6e8e41966bec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.475638] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Resuming the VM {{(pid=69982) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1343.475902] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0a0af5d-d49b-4e5d-b3fc-032cb7dc60a8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.482411] env[69982]: DEBUG oslo_vmware.api [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1343.482411] env[69982]: value = "task-3865751" [ 1343.482411] env[69982]: _type = "Task" [ 1343.482411] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.492341] env[69982]: DEBUG oslo_vmware.api [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865751, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.571040] env[69982]: DEBUG nova.compute.manager [req-1132f95e-3362-47a4-9dcc-937c190d84f8 req-5fc51e09-1666-4f8c-b35a-262c32f9527b service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Received event network-vif-deleted-d95c9544-b6cb-4f15-adb5-ef91c7ef325d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1343.571040] env[69982]: INFO nova.compute.manager [req-1132f95e-3362-47a4-9dcc-937c190d84f8 req-5fc51e09-1666-4f8c-b35a-262c32f9527b service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Neutron deleted interface d95c9544-b6cb-4f15-adb5-ef91c7ef325d; detaching it from the instance and deleting it from the info cache [ 1343.571267] env[69982]: DEBUG nova.network.neutron [req-1132f95e-3362-47a4-9dcc-937c190d84f8 req-5fc51e09-1666-4f8c-b35a-262c32f9527b service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1343.628253] env[69982]: DEBUG oslo_concurrency.lockutils [None req-57ac6c53-288d-44fe-9829-dd01a20f6ec9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1343.629561] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1343.629840] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1343.631505] env[69982]: INFO nova.compute.claims [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1343.732711] env[69982]: DEBUG oslo_concurrency.lockutils [req-a39d815b-0a50-479f-897d-08a58cf00878 req-9ed917e5-8ce3-4f12-b29c-b3d359cc34f9 service nova] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1343.763665] env[69982]: DEBUG nova.network.neutron [None 
req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Successfully updated port: b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.996130] env[69982]: DEBUG oslo_vmware.api [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865751, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.017376] env[69982]: DEBUG nova.network.neutron [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.074761] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-521bbbb8-ca08-482e-b7bd-be6c84ace091 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.086183] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7304510b-6fc4-4b47-8877-83532703bc37 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.117056] env[69982]: DEBUG nova.compute.manager [req-1132f95e-3362-47a4-9dcc-937c190d84f8 req-5fc51e09-1666-4f8c-b35a-262c32f9527b service nova] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Detach interface failed, port_id=d95c9544-b6cb-4f15-adb5-ef91c7ef325d, reason: Instance 6c20ba60-6552-47f3-8eb3-a71923a0a68f could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1344.267622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.267622] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.267622] env[69982]: DEBUG nova.network.neutron [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1344.494455] env[69982]: DEBUG oslo_vmware.api [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865751, 'name': PowerOnVM_Task, 'duration_secs': 0.557314} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.494784] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Resumed the VM {{(pid=69982) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1344.494975] env[69982]: DEBUG nova.compute.manager [None req-8f74c0cf-0e85-41a2-91fc-cb817b239968 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1344.495863] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556aab8a-10be-414a-9fbf-1b82322afb10 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.520375] env[69982]: INFO nova.compute.manager [-] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Took 1.45 seconds to deallocate network for instance. [ 1344.527404] env[69982]: DEBUG nova.compute.manager [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Received event network-changed-6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1344.527598] env[69982]: DEBUG nova.compute.manager [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Refreshing instance network info cache due to event network-changed-6c285c0e-e718-4241-ac88-8de36e44a62f. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1344.527822] env[69982]: DEBUG oslo_concurrency.lockutils [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] Acquiring lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.527967] env[69982]: DEBUG oslo_concurrency.lockutils [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] Acquired lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1344.528142] env[69982]: DEBUG nova.network.neutron [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Refreshing network info cache for port 6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1344.746420] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34d748f-eb4f-4d55-ab11-0648521e18ad {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.754891] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1210284-4b14-472a-862a-8b7068e2f381 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.788606] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1d9929-52af-456e-8e20-4c5ad6829977 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.797922] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69e60c1-3f9f-4b80-a84d-215396583e98 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.813106] env[69982]: DEBUG nova.compute.provider_tree [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1344.841222] env[69982]: WARNING nova.network.neutron [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] 0061b712-5877-4563-a7ff-45029c427868 already exists in list: networks containing: ['0061b712-5877-4563-a7ff-45029c427868']. 
ignoring it [ 1345.031267] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.316524] env[69982]: DEBUG nova.scheduler.client.report [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1345.430818] env[69982]: DEBUG nova.network.neutron [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "address": "fa:16:3e:38:0c:1a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ab07e2-91", "ovs_interfaceid": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.459357] env[69982]: DEBUG nova.network.neutron [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updated VIF entry in instance network info cache for port 6c285c0e-e718-4241-ac88-8de36e44a62f. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1345.459736] env[69982]: DEBUG nova.network.neutron [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating instance_info_cache with network_info: [{"id": "6c285c0e-e718-4241-ac88-8de36e44a62f", "address": "fa:16:3e:da:4e:83", "network": {"id": "469feea5-c69f-4cea-bd38-38ffb9e3a2af", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-2068936561-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d6d2d65079fb46d8a9b1a31d2eab9829", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c285c0e-e7", "ovs_interfaceid": "6c285c0e-e718-4241-ac88-8de36e44a62f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.595597] env[69982]: DEBUG nova.compute.manager [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1345.595829] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.595994] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.596160] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.596292] env[69982]: DEBUG nova.compute.manager [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] No waiting events found dispatching network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1345.596464] env[69982]: WARNING nova.compute.manager [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received unexpected event network-vif-plugged-b2ab07e2-9194-42f9-96cd-d32a4aceab17 for instance with vm_state active and task_state None. [ 1345.596689] env[69982]: DEBUG nova.compute.manager [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-changed-b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1345.596917] env[69982]: DEBUG nova.compute.manager [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing instance network info cache due to event network-changed-b2ab07e2-9194-42f9-96cd-d32a4aceab17. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1345.597174] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.633856] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.634115] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.634327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.634507] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.634669] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.637192] env[69982]: INFO nova.compute.manager [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Terminating instance [ 1345.821904] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.192s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.822474] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 
tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1345.825371] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.794s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.825657] env[69982]: DEBUG nova.objects.instance [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lazy-loading 'resources' on Instance uuid 6c20ba60-6552-47f3-8eb3-a71923a0a68f {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1345.933673] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.934343] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.934504] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.934784] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.934967] env[69982]: DEBUG nova.network.neutron [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Refreshing network info cache for port b2ab07e2-9194-42f9-96cd-d32a4aceab17 {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1345.937064] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e38d35-ec69-41a4-8481-d6803028f427 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.956163] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1345.956395] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1345.956564] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1345.956768] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1345.956915] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1345.957076] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1345.957287] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1345.957443] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1345.957607] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1345.957767] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1345.957934] env[69982]: DEBUG nova.virt.hardware [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1345.964371] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfiguring VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1345.965135] env[69982]: DEBUG oslo_concurrency.lockutils [req-651c7857-d1e2-4489-a09f-cf82e0750b7a req-2b32e67e-b4ac-4875-8d73-f79e6aed59c2 service nova] Releasing lock "refresh_cache-8f0dacd5-59ec-495e-bac7-df2b76883562" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.965499] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82f43e05-a18d-4d9d-b0e3-d12c5c4adb30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.983239] env[69982]: DEBUG oslo_vmware.api [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1345.983239] env[69982]: value = "task-3865752" [ 1345.983239] env[69982]: _type = "Task" [ 1345.983239] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.993024] env[69982]: DEBUG oslo_vmware.api [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865752, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.141336] env[69982]: DEBUG nova.compute.manager [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1346.141572] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.142513] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e2eecc-6b18-427c-8d92-3e4dd739faa4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.151366] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.151631] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a7488a8-f58f-442a-982e-16df54889ef2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.158280] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1346.158280] env[69982]: value = "task-3865753" [ 1346.158280] env[69982]: _type = "Task" [ 1346.158280] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.167270] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865753, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.328874] env[69982]: DEBUG nova.compute.utils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1346.332908] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1346.333127] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1346.376104] env[69982]: DEBUG nova.policy [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fec33fa84b60450d95e4cb53b5aac0f2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7372e00e1966430da6131e02f199ba14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1346.425047] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61eb697c-a8c8-47b4-8b5c-792d27a0c8ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.434917] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc30e953-4d57-4c0b-a29c-f1f58ffc657d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.468145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d032dad-c96b-4442-827d-d668fdc20e4a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.476347] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ae4466-bddf-4e05-9fae-f3e9fcc99346 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.491022] env[69982]: DEBUG nova.compute.provider_tree [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1346.500261] env[69982]: DEBUG oslo_vmware.api [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.668365] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865753, 'name': PowerOffVM_Task, 'duration_secs': 0.214213} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.668623] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.668792] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.669053] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3177a8e7-ade0-45e0-b902-4253564a2d0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.747391] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.748037] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.748888] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleting the datastore file [datastore2] 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.749924] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef9112d7-3381-451a-9c7a-516f5ad139c3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.761602] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for the task: (returnval){ [ 1346.761602] env[69982]: value = "task-3865755" [ 1346.761602] env[69982]: _type = "Task" [ 1346.761602] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.770397] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865755, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.829799] env[69982]: DEBUG nova.network.neutron [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updated VIF entry in instance network info cache for port b2ab07e2-9194-42f9-96cd-d32a4aceab17. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1346.830316] env[69982]: DEBUG nova.network.neutron [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "address": "fa:16:3e:38:0c:1a", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2ab07e2-91", "ovs_interfaceid": "b2ab07e2-9194-42f9-96cd-d32a4aceab17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1346.836648] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Start building block device 
mappings for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1346.887991] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Successfully created port: 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1346.996506] env[69982]: DEBUG nova.scheduler.client.report [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1347.005023] env[69982]: DEBUG oslo_vmware.api [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.275699] env[69982]: DEBUG oslo_vmware.api [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Task: {'id': task-3865755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148096} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.275996] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.276321] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.276627] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.276939] env[69982]: INFO nova.compute.manager [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Took 1.14 seconds to destroy the instance on the hypervisor. 
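[editor's note] The entries above follow the two rhythms that dominate this stretch of the log: oslo.concurrency lock bookkeeping ("Acquiring lock ... / acquired ... waited / released ... held") and oslo.vmware task polling, where a vCenter task such as DeleteDatastoreFile_Task or ReconfigVM_Task is reported as "progress is N%." (api.py:434) until a final line carrying 'duration_secs' marks completion (api.py:444). The sketch below is a minimal, illustrative polling loop in that spirit only; it is not the oslo.vmware implementation, and fetch_task_info, task_id and poll_interval are hypothetical placeholders introduced for this example.

    # Minimal sketch of the polling rhythm seen in the log: repeated
    # "progress is N%." lines, then a completion line with a duration.
    # NOT the oslo.vmware implementation; fetch_task_info/task_id/
    # poll_interval are hypothetical placeholders.
    import time


    def wait_for_vcenter_task(fetch_task_info, task_id, poll_interval=0.5):
        """Poll a vCenter task until it leaves the queued/running states.

        fetch_task_info is assumed to return a dict such as
        {'state': 'running', 'progress': 14} or {'state': 'success'}.
        """
        start = time.monotonic()
        while True:
            info = fetch_task_info(task_id)
            state = info.get('state')
            if state in ('queued', 'running'):
                # Corresponds to the repeated "progress is N%." DEBUG lines.
                print("Task %s progress is %s%%." % (task_id, info.get('progress', 0)))
                time.sleep(poll_interval)
                continue
            duration = time.monotonic() - start
            if state == 'success':
                # Corresponds to "... 'duration_secs': X} completed successfully."
                print("Task %s completed successfully in %.6fs." % (task_id, duration))
                return info
            raise RuntimeError("Task %s ended in state %r after %.2fs"
                               % (task_id, state, duration))

In the surrounding entries this waiting is done on Nova's behalf by oslo_vmware.api, which the log itself points at: wait_for_task at api.py:397, the per-poll "progress is N%." lines at api.py:434, and the completed-task line with 'duration_secs' at api.py:444.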
[ 1347.277283] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1347.277556] env[69982]: DEBUG nova.compute.manager [-] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1347.277699] env[69982]: DEBUG nova.network.neutron [-] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1347.333525] env[69982]: DEBUG oslo_concurrency.lockutils [req-3538d64a-c704-4cd9-8d02-95190b055e49 req-abd2f2ae-0685-4b24-978c-f0b0463404a0 service nova] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.501849] env[69982]: DEBUG oslo_vmware.api [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865752, 'name': ReconfigVM_Task, 'duration_secs': 1.345647} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.502068] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1347.502244] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfigured VM to attach interface {{(pid=69982) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1347.505974] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.531924] env[69982]: INFO nova.scheduler.client.report [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Deleted allocations for instance 6c20ba60-6552-47f3-8eb3-a71923a0a68f [ 1347.780932] env[69982]: DEBUG nova.compute.manager [req-b1281abe-df60-4493-b28b-f1daf7c975b5 req-0accb08c-f7c5-4933-a8e4-44b0e627db3b service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Received event network-vif-deleted-dc946213-769e-4998-bc51-5344491a5d10 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1347.781261] env[69982]: INFO nova.compute.manager [req-b1281abe-df60-4493-b28b-f1daf7c975b5 
req-0accb08c-f7c5-4933-a8e4-44b0e627db3b service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Neutron deleted interface dc946213-769e-4998-bc51-5344491a5d10; detaching it from the instance and deleting it from the info cache [ 1347.781483] env[69982]: DEBUG nova.network.neutron [req-b1281abe-df60-4493-b28b-f1daf7c975b5 req-0accb08c-f7c5-4933-a8e4-44b0e627db3b service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1347.851490] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Start spawning the instance on the hypervisor. {{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1347.882514] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1347.882779] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1347.882956] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1347.883127] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1347.883347] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1347.883416] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1347.883631] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1347.883952] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1347.884042] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1347.884138] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1347.884314] env[69982]: DEBUG nova.virt.hardware [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1347.885225] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc9e694-ea8a-488a-b775-d8bbb28831e1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.893397] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da02bb2-f3b9-4f3d-a523-eaed8e7077ff {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.007323] env[69982]: DEBUG oslo_concurrency.lockutils [None req-f801a473-c9cd-4090-ba18-edbf53666bce tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.087s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.042455] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5c3e1eef-c2ca-4c2d-b0eb-77b58d39c236 tempest-ServerActionsTestOtherB-728294423 tempest-ServerActionsTestOtherB-728294423-project-member] Lock "6c20ba60-6552-47f3-8eb3-a71923a0a68f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.610s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.266875] env[69982]: DEBUG nova.network.neutron [-] [instance: 
2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.284906] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5324d2a-c71f-41a4-b910-c701462b1289 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.295612] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5443ab50-d645-4093-8337-2b2510e359b8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.327550] env[69982]: DEBUG nova.compute.manager [req-b1281abe-df60-4493-b28b-f1daf7c975b5 req-0accb08c-f7c5-4933-a8e4-44b0e627db3b service nova] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Detach interface failed, port_id=dc946213-769e-4998-bc51-5344491a5d10, reason: Instance 2cbd6771-48dd-44bc-a0e3-96dad0a7aead could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1348.349145] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Successfully updated port: 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1348.770053] env[69982]: INFO nova.compute.manager [-] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Took 1.49 seconds to deallocate network for instance. [ 1348.852683] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1348.852951] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1348.853191] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1349.276936] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.277216] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.277442] env[69982]: DEBUG nova.objects.instance [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lazy-loading 'resources' on Instance uuid 2cbd6771-48dd-44bc-a0e3-96dad0a7aead {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.386685] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance cache missing network info. {{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1349.513408] env[69982]: DEBUG nova.network.neutron [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.674042] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.674281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.809858] env[69982]: DEBUG 
nova.compute.manager [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1349.809858] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.809858] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.809858] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1349.809858] env[69982]: DEBUG nova.compute.manager [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] No waiting events found dispatching network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1349.810195] env[69982]: WARNING nova.compute.manager [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received unexpected event network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d for instance with vm_state building and task_state spawning. [ 1349.810195] env[69982]: DEBUG nova.compute.manager [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1349.810298] env[69982]: DEBUG nova.compute.manager [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing instance network info cache due to event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1349.810459] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.866590] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ecec4c-053d-410e-b174-3a81dd5d430a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.875012] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e22c881-f3eb-4780-b540-1f9df6991cb5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.909369] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9303ee82-561f-47c4-9797-714d97730f45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.918579] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27114cd4-fecf-4756-b6e2-fe6cdb50e833 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.935509] env[69982]: DEBUG nova.compute.provider_tree [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.016390] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.016749] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance network_info: |[{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": 
"7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1350.017082] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.017268] env[69982]: DEBUG nova.network.neutron [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1350.018677] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:20:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e64b942-be80-4f11-8eaa-9e4f6219c94d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1350.026660] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1350.029582] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1350.030022] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60dc887e-5da0-4f29-abcd-c3f77f4e7159 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.051411] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1350.051411] env[69982]: value = "task-3865757" [ 1350.051411] env[69982]: _type = "Task" [ 1350.051411] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.060682] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865757, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.176648] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.176879] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1350.177743] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74b56b5-b7ba-464e-b23b-28a58a900f6e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.196020] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62caaf0-e178-4981-b912-64b006e9c6f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.222903] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfiguring VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1350.227222] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-066a8e9e-ddd6-432e-a502-3636d38d5f2f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.251612] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1350.251612] env[69982]: value = "task-3865758" [ 1350.251612] env[69982]: _type = "Task" [ 1350.251612] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.260984] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.323115] env[69982]: DEBUG nova.network.neutron [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updated VIF entry in instance network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1350.323555] env[69982]: DEBUG nova.network.neutron [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.440190] env[69982]: DEBUG nova.scheduler.client.report [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1350.562652] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865757, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.762978] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.826954] env[69982]: DEBUG oslo_concurrency.lockutils [req-5bbe01b1-e325-4679-b569-ca4b4aacc115 req-69048efc-5cb7-4623-bec8-033f3c4216c4 service nova] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1350.944747] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.973019] env[69982]: INFO nova.scheduler.client.report [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Deleted allocations for instance 2cbd6771-48dd-44bc-a0e3-96dad0a7aead [ 1351.062317] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865757, 'name': CreateVM_Task} progress is 99%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.262321] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.482383] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a5f14443-f943-4483-b40b-cafc17566e95 tempest-ServerActionsTestJSON-676534199 tempest-ServerActionsTestJSON-676534199-project-member] Lock "2cbd6771-48dd-44bc-a0e3-96dad0a7aead" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.848s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1351.563530] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865757, 'name': CreateVM_Task, 'duration_secs': 1.344439} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.563728] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1351.564436] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1351.564603] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1351.564936] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1351.565560] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d1a9c1-265e-4532-96e9-7a5b4adaae75 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.570482] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1351.570482] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae9eb6-bb5a-96ed-2b7e-bde4ea6510c9" [ 1351.570482] env[69982]: _type = "Task" [ 1351.570482] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.578882] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae9eb6-bb5a-96ed-2b7e-bde4ea6510c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.763305] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.082434] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52ae9eb6-bb5a-96ed-2b7e-bde4ea6510c9, 'name': SearchDatastore_Task, 'duration_secs': 0.013313} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.082828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1352.082984] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1352.083239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1352.083386] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1352.083568] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1352.083840] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dd19460-1b10-49c1-aca7-95281f876688 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.093363] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1352.093563] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1352.094372] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f342a227-51cd-40e2-a893-bc1c7fad6fc4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.100127] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1352.100127] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251ef26-d16f-f375-ee4f-4cb4ccb080fc" [ 1352.100127] env[69982]: _type = "Task" [ 1352.100127] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.108470] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251ef26-d16f-f375-ee4f-4cb4ccb080fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.263444] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.620119] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5251ef26-d16f-f375-ee4f-4cb4ccb080fc, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.620119] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bbf96d5-3dbe-4f18-969c-f6d702da4b8a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.620972] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1352.620972] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525196ef-a9c1-0a2a-74e4-7b27d6ea29d8" [ 1352.620972] env[69982]: _type = "Task" [ 1352.620972] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.631385] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525196ef-a9c1-0a2a-74e4-7b27d6ea29d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.766873] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.137272] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]525196ef-a9c1-0a2a-74e4-7b27d6ea29d8, 'name': SearchDatastore_Task, 'duration_secs': 0.010941} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1353.137712] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1353.138209] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1353.138667] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8c95a60-0a90-4fd7-9652-85555dad6462 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.149098] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1353.149098] env[69982]: value = "task-3865759" [ 1353.149098] env[69982]: _type = "Task" [ 1353.149098] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.159566] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865759, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.266368] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.660771] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865759, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.770119] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.161033] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865759, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532265} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.161283] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1354.161500] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1354.161754] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a43ff517-234f-4998-8733-2551e7ce0cd2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.169671] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1354.169671] env[69982]: value = "task-3865760" [ 1354.169671] env[69982]: _type = "Task" [ 1354.169671] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.178971] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865760, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.270286] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.680165] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074527} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.680566] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1354.681727] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7414135-c48b-4068-8c07-685910bec447 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.704999] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1354.704999] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-995c396c-8a15-43ef-bc9c-0a00cd5d932e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.725614] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1354.725614] env[69982]: value = "task-3865761" [ 1354.725614] env[69982]: _type = "Task" [ 1354.725614] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.734025] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865761, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.768444] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.240024] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865761, 'name': ReconfigVM_Task, 'duration_secs': 0.306161} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.240024] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1355.240024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76179dfb-a69e-47cc-b142-ffb796daf9b1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.246471] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1355.246471] env[69982]: value = "task-3865762" [ 1355.246471] env[69982]: _type = "Task" [ 1355.246471] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.257679] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865762, 'name': Rename_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.270343] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.766265] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865762, 'name': Rename_Task, 'duration_secs': 0.152648} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.775084] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1355.775559] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-93576b86-2042-4877-81ff-608cf322a411 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.787952] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1355.787952] env[69982]: value = "task-3865763" [ 1355.787952] env[69982]: _type = "Task" [ 1355.787952] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.788333] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.799946] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865763, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.277207] env[69982]: DEBUG oslo_vmware.api [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865758, 'name': ReconfigVM_Task, 'duration_secs': 5.79868} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.278116] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.278339] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Reconfigured VM to detach interface {{(pid=69982) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1356.303583] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865763, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.801064] env[69982]: DEBUG oslo_vmware.api [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865763, 'name': PowerOnVM_Task, 'duration_secs': 0.534159} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.801404] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1356.801574] env[69982]: INFO nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Took 8.95 seconds to spawn the instance on the hypervisor. [ 1356.801756] env[69982]: DEBUG nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1356.802600] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21929a79-1cb8-45fd-a05f-3691f2977640 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.330162] env[69982]: INFO nova.compute.manager [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Took 13.72 seconds to build instance. 
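[editor's aside] The records above trace the VMware driver spawning instance 86b67d38-95b6-46cf-bf7c-524a06773c0c through a chain of vSphere tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each submitted through oslo.vmware and then polled until completion (the repeated "_poll_task ... progress is N%" lines). Below is a minimal sketch of that invoke-then-poll pattern using oslo.vmware's public API; the host, credentials, and managed-object value are placeholders and not taken from this deployment.

```python
# Minimal sketch of the invoke-then-poll pattern visible in the log records
# above. All values (host, credentials, moref) are placeholders; the session
# arguments follow oslo.vmware's VMwareAPISession API.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.test',            # placeholder vCenter host
    'administrator@vsphere.local',     # placeholder username
    'password',                        # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5,            # roughly the polling cadence seen above
)

# Build a managed object reference for the VM (placeholder value), then
# submit a vSphere task the same way the driver does for PowerOnVM_Task.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task at task_poll_interval (producing lines like
# "_poll_task ... progress is N%") and returns once the task succeeds,
# raising an exception if the task ends in an error state.
task_info = session.wait_for_task(task)
```

The same pattern applies to the other tasks in the sequence; only the method name and arguments passed to invoke_api() change.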
[ 1357.730252] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.730476] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquired lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.730677] env[69982]: DEBUG nova.network.neutron [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1357.813282] env[69982]: DEBUG nova.compute.manager [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1357.813567] env[69982]: DEBUG nova.compute.manager [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing instance network info cache due to event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1357.813769] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.813895] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1357.816621] env[69982]: DEBUG nova.network.neutron [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1357.837118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4b67a737-37fc-4656-b4b0-6e6d42a4e272 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.233s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.242749] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.242983] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.243200] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.243377] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.243610] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 
tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.245529] env[69982]: INFO nova.compute.manager [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Terminating instance [ 1358.549838] env[69982]: INFO nova.network.neutron [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Port b2ab07e2-9194-42f9-96cd-d32a4aceab17 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1358.550645] env[69982]: DEBUG nova.network.neutron [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [{"id": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "address": "fa:16:3e:7a:39:48", "network": {"id": "0061b712-5877-4563-a7ff-45029c427868", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1481944446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9f274f86bbe43d4b92ac058f100ba0f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fe38bb7e-8bcb-419d-868f-0dc105c69651", "external-id": "nsx-vlan-transportzone-432", "segmentation_id": 432, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93615399-3c", "ovs_interfaceid": "93615399-3c22-4aa4-b3d0-f2c5cffc9368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.631921] env[69982]: DEBUG nova.network.neutron [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updated VIF entry in instance network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1358.632355] env[69982]: DEBUG nova.network.neutron [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1358.752808] env[69982]: DEBUG nova.compute.manager [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1358.753072] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1358.753981] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc35de0e-732b-4d47-b225-61fa87bc6e1d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.764551] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1358.765249] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0455887-3f71-42c3-abb9-a9f9ea0ecfe3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.772931] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1358.772931] env[69982]: value = "task-3865767" [ 1358.772931] env[69982]: _type = "Task" [ 1358.772931] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.783035] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.053775] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Releasing lock "refresh_cache-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.135429] env[69982]: DEBUG oslo_concurrency.lockutils [req-f1ff196c-d70d-4474-b481-97984e71368d req-731dfee2-fa74-410a-a8ee-d5ed99fbddd8 service nova] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1359.289285] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865767, 'name': PowerOffVM_Task, 'duration_secs': 0.190501} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.290031] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.290089] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.290391] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f966985f-be77-49df-8e99-d80288534e30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.360859] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.361095] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.361284] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleting the datastore file [datastore1] 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.361564] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c9a9c77-c2da-4249-b117-415b799c657c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.371939] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1359.371939] env[69982]: value = "task-3865769" [ 1359.371939] env[69982]: _type = "Task" [ 1359.371939] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.384701] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865769, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.563601] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a04e25ff-f55d-43b6-8854-e266dd60b80d tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "interface-889ff9c8-08a2-4249-ae5b-bc94bc16dc8c-b2ab07e2-9194-42f9-96cd-d32a4aceab17" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.889s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.885619] env[69982]: DEBUG oslo_vmware.api [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.397912} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.885619] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1359.885619] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1359.885619] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1359.885619] env[69982]: INFO nova.compute.manager [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1359.885619] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1359.885619] env[69982]: DEBUG nova.compute.manager [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1359.885619] env[69982]: DEBUG nova.network.neutron [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1360.144091] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1360.144543] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1360.647916] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Starting instance... 
{{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1360.675342] env[69982]: DEBUG nova.compute.manager [req-75e24ab7-a025-415c-813f-076a6870419a req-f324d78b-24d7-4929-b848-f71008a79237 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Received event network-vif-deleted-93615399-3c22-4aa4-b3d0-f2c5cffc9368 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1360.675342] env[69982]: INFO nova.compute.manager [req-75e24ab7-a025-415c-813f-076a6870419a req-f324d78b-24d7-4929-b848-f71008a79237 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Neutron deleted interface 93615399-3c22-4aa4-b3d0-f2c5cffc9368; detaching it from the instance and deleting it from the info cache [ 1360.675342] env[69982]: DEBUG nova.network.neutron [req-75e24ab7-a025-415c-813f-076a6870419a req-f324d78b-24d7-4929-b848-f71008a79237 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.157220] env[69982]: DEBUG nova.network.neutron [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.176326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1361.178883] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.003s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1361.182995] env[69982]: INFO nova.compute.claims [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1361.189724] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fa387f4-13b9-4f3a-8ec8-f8d78eeb85d0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.205589] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6f09e6-9e90-46e2-a36b-f3e61ab3f529 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.241084] env[69982]: DEBUG nova.compute.manager [req-75e24ab7-a025-415c-813f-076a6870419a req-f324d78b-24d7-4929-b848-f71008a79237 service nova] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Detach interface failed, port_id=93615399-3c22-4aa4-b3d0-f2c5cffc9368, reason: Instance 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c could not be found. 
{{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1361.662589] env[69982]: INFO nova.compute.manager [-] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Took 1.78 seconds to deallocate network for instance. [ 1362.169817] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.287855] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0385e6b4-7f2c-4c89-8bf3-9e9c6a30245b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.298781] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84be5f06-aa3d-460c-ada9-7ad6f0170999 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.338294] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bfcb6a-e7fd-49b2-9948-c303c0419247 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.347635] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0796f2a-ff32-43e3-a34e-ef63175f5fb7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.366124] env[69982]: DEBUG nova.compute.provider_tree [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1362.869108] env[69982]: DEBUG nova.scheduler.client.report [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1363.284305] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.374710] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.196s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.376011] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1363.377927] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.208s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.378457] env[69982]: DEBUG nova.objects.instance [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'resources' on Instance uuid 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.881322] env[69982]: DEBUG nova.compute.utils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1363.886488] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1363.886655] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1363.925988] env[69982]: DEBUG nova.policy [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5ff618c282a429289cfdc037998ab6d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '919dc1942f6f4f81aaec826bf239d1b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1363.965490] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c364191-cb2c-4f40-b6bb-0283eb778d12 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.974978] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3206ddc5-471d-4ef1-8cd4-2e1db08065eb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.011950] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85c29ac-35d1-4179-a9ef-9170aa077225 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.020439] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adabda01-d557-424b-a8ee-ed7c0384ef51 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.035819] env[69982]: DEBUG nova.compute.provider_tree [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.205090] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Successfully created port: e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1364.388019] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1364.539579] env[69982]: DEBUG nova.scheduler.client.report [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1365.045435] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.069609] env[69982]: INFO nova.scheduler.client.report [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted allocations for instance 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c [ 1365.285154] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1365.399426] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1365.430346] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-05-07T07:02:57Z,direct_url=,disk_format='vmdk',id=a4e69d6f-1c15-4f57-92a8-5e81c6be8172,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='a1f539316afa4eeb82cb433ea8b6071b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-05-07T07:02:57Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1365.430815] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1365.430991] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1365.431195] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1365.431343] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1365.431491] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1365.431704] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1365.431863] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 
tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1365.432054] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1365.432234] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1365.432408] env[69982]: DEBUG nova.virt.hardware [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1365.434120] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46efebd-0411-411e-9315-92b0c80045c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.442494] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fa829d-1296-42a1-a8cd-23bb467b1fb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.578445] env[69982]: DEBUG oslo_concurrency.lockutils [None req-0fdcd5aa-09f5-4df7-9bd1-8096469ef6f3 tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "889ff9c8-08a2-4249-ae5b-bc94bc16dc8c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.335s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.590350] env[69982]: DEBUG nova.compute.manager [req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Received event network-vif-plugged-e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1365.591246] env[69982]: DEBUG oslo_concurrency.lockutils [req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] Acquiring lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1365.591246] env[69982]: DEBUG oslo_concurrency.lockutils [req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1365.591246] env[69982]: DEBUG oslo_concurrency.lockutils 
[req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.591367] env[69982]: DEBUG nova.compute.manager [req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] No waiting events found dispatching network-vif-plugged-e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1365.592772] env[69982]: WARNING nova.compute.manager [req-f93ad29b-2408-45b0-a26b-f4f8d4a749a5 req-5e7f249f-16ae-48ee-9d2d-3b5f5547020a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Received unexpected event network-vif-plugged-e73e1d6c-fa11-4e6c-990b-d60db96b26ec for instance with vm_state building and task_state spawning. [ 1365.674544] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Successfully updated port: e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1366.181616] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.181828] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquired lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1366.182228] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1366.183854] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.184145] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.185048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.185048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1366.185048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.186686] env[69982]: INFO nova.compute.manager [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Terminating instance [ 1366.689982] env[69982]: DEBUG nova.compute.manager [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1366.690289] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1366.691184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53df518e-92c3-4084-9a24-4b0a184fe192 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.701623] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1366.701949] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8ccd29b-366c-4de7-88e6-000de38db9de {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.710561] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1366.710561] env[69982]: value = "task-3865774" [ 1366.710561] env[69982]: _type = "Task" [ 1366.710561] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.722224] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865774, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.726883] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1366.890487] env[69982]: DEBUG nova.network.neutron [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Updating instance_info_cache with network_info: [{"id": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "address": "fa:16:3e:64:d1:57", "network": {"id": "659e0355-9d48-45b9-8918-2acea85f3ada", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1122949631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919dc1942f6f4f81aaec826bf239d1b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73e1d6c-fa", "ovs_interfaceid": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.221754] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865774, 'name': PowerOffVM_Task, 'duration_secs': 0.354316} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.222044] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1367.222219] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1367.222486] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae491294-e30d-47fc-a003-d144b3edf8ec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.280166] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.284023] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1367.290102] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1367.290337] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1367.290520] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleting the datastore file [datastore2] e778d67f-13de-4a50-9c46-137bdbfd4ddf {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1367.290787] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-683d4792-2764-4117-becc-180d41ad09cd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.298156] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for the task: (returnval){ [ 1367.298156] env[69982]: value = "task-3865776" [ 1367.298156] env[69982]: _type = "Task" [ 1367.298156] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.306798] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865776, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.393686] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Releasing lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1367.394112] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Instance network_info: |[{"id": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "address": "fa:16:3e:64:d1:57", "network": {"id": "659e0355-9d48-45b9-8918-2acea85f3ada", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1122949631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919dc1942f6f4f81aaec826bf239d1b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73e1d6c-fa", "ovs_interfaceid": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1367.394554] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:d1:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7c708997-9b6e-4c27-8a58-02c0d1359d5c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e73e1d6c-fa11-4e6c-990b-d60db96b26ec', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1367.403185] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Creating folder: Project (919dc1942f6f4f81aaec826bf239d1b4). Parent ref: group-v767796. 
{{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1367.403508] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67a15a98-d6ea-405f-9400-8799edea0ec6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.416837] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Created folder: Project (919dc1942f6f4f81aaec826bf239d1b4) in parent group-v767796. [ 1367.417045] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Creating folder: Instances. Parent ref: group-v768136. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1367.417311] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6694265d-5723-4c76-8331-67572eb9868e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.427949] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Created folder: Instances in parent group-v768136. [ 1367.428214] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1367.428410] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1367.428616] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6cbef890-328e-4808-9a47-b15a8f831e38 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.448136] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1367.448136] env[69982]: value = "task-3865779" [ 1367.448136] env[69982]: _type = "Task" [ 1367.448136] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.456707] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865779, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.616993] env[69982]: DEBUG nova.compute.manager [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Received event network-changed-e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1367.617216] env[69982]: DEBUG nova.compute.manager [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Refreshing instance network info cache due to event network-changed-e73e1d6c-fa11-4e6c-990b-d60db96b26ec. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1367.617433] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] Acquiring lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.617570] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] Acquired lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.617728] env[69982]: DEBUG nova.network.neutron [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Refreshing network info cache for port e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1367.808730] env[69982]: DEBUG oslo_vmware.api [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Task: {'id': task-3865776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108403} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.809200] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1367.809200] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1367.809348] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1367.809564] env[69982]: INFO nova.compute.manager [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1367.809847] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1367.810058] env[69982]: DEBUG nova.compute.manager [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1367.810158] env[69982]: DEBUG nova.network.neutron [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1367.958062] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865779, 'name': CreateVM_Task, 'duration_secs': 0.345223} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.958364] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1367.958944] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.959125] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1367.959464] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1367.959734] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-049e77e4-57fb-48e2-8bc8-84868e84f3fa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.964994] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1367.964994] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202203e-bda2-4e2e-615b-69378d0fa110" [ 1367.964994] env[69982]: _type = "Task" [ 1367.964994] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.973565] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202203e-bda2-4e2e-615b-69378d0fa110, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.285863] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1368.315062] env[69982]: DEBUG nova.network.neutron [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Updated VIF entry in instance network info cache for port e73e1d6c-fa11-4e6c-990b-d60db96b26ec. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1368.315468] env[69982]: DEBUG nova.network.neutron [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Updating instance_info_cache with network_info: [{"id": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "address": "fa:16:3e:64:d1:57", "network": {"id": "659e0355-9d48-45b9-8918-2acea85f3ada", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1122949631-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "919dc1942f6f4f81aaec826bf239d1b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7c708997-9b6e-4c27-8a58-02c0d1359d5c", "external-id": "nsx-vlan-transportzone-370", "segmentation_id": 370, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape73e1d6c-fa", "ovs_interfaceid": "e73e1d6c-fa11-4e6c-990b-d60db96b26ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.476777] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5202203e-bda2-4e2e-615b-69378d0fa110, 'name': SearchDatastore_Task, 'duration_secs': 0.009589} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.476777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1368.478031] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Processing image a4e69d6f-1c15-4f57-92a8-5e81c6be8172 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1368.478031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.478031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1368.478031] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1368.478328] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5cdbb0e-fd5e-4a1f-8d0a-7d9b4442cced {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.487842] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1368.488058] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1368.488808] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-859df56a-e3bf-4e17-851c-1b853822398d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.495614] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1368.495614] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fa637f-bfef-ee57-de04-d7b6b90c0078" [ 1368.495614] env[69982]: _type = "Task" [ 1368.495614] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.503264] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fa637f-bfef-ee57-de04-d7b6b90c0078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.567702] env[69982]: DEBUG nova.network.neutron [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1368.818879] env[69982]: DEBUG oslo_concurrency.lockutils [req-5ec51e0b-9a8b-4f3c-a634-03280517d26c req-c68675dc-5b53-4d1e-8d1d-a6a55943866a service nova] Releasing lock "refresh_cache-842ca18a-d33b-4658-9d78-e51b5de0ea68" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.006306] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52fa637f-bfef-ee57-de04-d7b6b90c0078, 'name': SearchDatastore_Task, 'duration_secs': 0.009557} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.007200] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad59ab45-ed5c-475e-8daa-bfe3f9281be6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.013105] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1369.013105] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52630da4-f395-6132-d65d-385c68638f70" [ 1369.013105] env[69982]: _type = "Task" [ 1369.013105] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.021031] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52630da4-f395-6132-d65d-385c68638f70, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.070698] env[69982]: INFO nova.compute.manager [-] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Took 1.26 seconds to deallocate network for instance. [ 1369.284119] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.284380] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1369.524500] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52630da4-f395-6132-d65d-385c68638f70, 'name': SearchDatastore_Task, 'duration_secs': 0.009901} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.524777] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1369.525060] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 842ca18a-d33b-4658-9d78-e51b5de0ea68/842ca18a-d33b-4658-9d78-e51b5de0ea68.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1369.525349] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e9b2a7f-9649-4036-b0ee-8b6865502714 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.531755] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1369.531755] env[69982]: value = "task-3865781" [ 1369.531755] env[69982]: _type = "Task" [ 1369.531755] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.540835] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.577835] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1369.578192] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1369.578442] env[69982]: DEBUG nova.objects.instance [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lazy-loading 'resources' on Instance uuid e778d67f-13de-4a50-9c46-137bdbfd4ddf {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.648874] env[69982]: DEBUG nova.compute.manager [req-fc0cb0a8-b874-4803-bbac-d479606ce82f req-b60b42d5-3b31-4ab0-b5eb-3d39f24e66fa service nova] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Received event network-vif-deleted-90606ccc-f773-4313-9003-0b3239a7ca18 {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1369.787462] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1370.042248] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453472} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.042640] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a4e69d6f-1c15-4f57-92a8-5e81c6be8172/a4e69d6f-1c15-4f57-92a8-5e81c6be8172.vmdk to [datastore1] 842ca18a-d33b-4658-9d78-e51b5de0ea68/842ca18a-d33b-4658-9d78-e51b5de0ea68.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1370.042727] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Extending root virtual disk to 1048576 {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1370.042994] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5c181d9-3215-4cd9-9a6c-36ce838c6a45 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.049871] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1370.049871] env[69982]: value = "task-3865782" [ 1370.049871] env[69982]: _type = "Task" [ 1370.049871] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.058699] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865782, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.149275] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d16d19-381b-4739-94a6-69ff7525ebbb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.157975] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dcf157-1f33-40f9-93d7-4d20ec0702e8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.190735] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d61ebee4-4dc9-41a6-9c1c-924ba64476b2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.199320] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e03393c-8983-4fad-bd96-376e20f44da0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.214374] env[69982]: DEBUG nova.compute.provider_tree [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.559979] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064289} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.560331] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Extended root virtual disk {{(pid=69982) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1370.561156] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a6befb-52a3-4274-b2ba-58b8e23c7a88 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.583344] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 842ca18a-d33b-4658-9d78-e51b5de0ea68/842ca18a-d33b-4658-9d78-e51b5de0ea68.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1370.583643] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7426de8d-0c24-4fb9-9504-bd3795531628 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.603409] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1370.603409] env[69982]: value = "task-3865784" [ 1370.603409] env[69982]: _type = "Task" [ 1370.603409] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.611670] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865784, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.717829] env[69982]: DEBUG nova.scheduler.client.report [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1371.114290] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865784, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.222436] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.644s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.224776] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.437s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.225165] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1371.225165] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1371.226022] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb761cf1-9b60-42ee-87e5-98a035fb8fd9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.234525] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91072c0d-6735-49f9-b385-b20d1668d4ac {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.241035] env[69982]: INFO nova.scheduler.client.report [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Deleted allocations for instance e778d67f-13de-4a50-9c46-137bdbfd4ddf [ 1371.255511] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce7dee0-e6d6-4891-b353-91593097927e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.263779] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0240c9d-bd62-46d6-ba53-90c2dad8964f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.295444] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180259MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1371.296197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.296197] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.615092] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865784, 'name': ReconfigVM_Task, 'duration_secs': 0.563312} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.615376] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 842ca18a-d33b-4658-9d78-e51b5de0ea68/842ca18a-d33b-4658-9d78-e51b5de0ea68.vmdk or device None with type sparse {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1371.616040] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0918b9f-c0c9-4773-883c-1bd067967acd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.624017] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1371.624017] env[69982]: value = "task-3865785" [ 1371.624017] env[69982]: _type = "Task" [ 1371.624017] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.632068] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865785, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.762310] env[69982]: DEBUG oslo_concurrency.lockutils [None req-5ea79a97-f373-4a10-be6b-98ada29a898e tempest-AttachInterfacesTestJSON-924533598 tempest-AttachInterfacesTestJSON-924533598-project-member] Lock "e778d67f-13de-4a50-9c46-137bdbfd4ddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.578s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.134292] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865785, 'name': Rename_Task, 'duration_secs': 0.171365} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.134581] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1372.134812] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9dbf6532-9302-425c-88d2-2f7dee52e03e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.142053] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1372.142053] env[69982]: value = "task-3865786" [ 1372.142053] env[69982]: _type = "Task" [ 1372.142053] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.150170] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.318984] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 8f0dacd5-59ec-495e-bac7-df2b76883562 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1372.319132] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 86b67d38-95b6-46cf-bf7c-524a06773c0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1372.319231] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 842ca18a-d33b-4658-9d78-e51b5de0ea68 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1372.319427] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1372.319602] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1372.373780] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a267456-fc6f-4632-bd64-6e86b0b47f8b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.381971] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f13b90-9371-4fe6-8cce-7f29d6ff87ee {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.413893] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ec9510-aaf9-4253-a934-4c9dc3acf5d6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.422386] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50360dd-42a7-49af-bd26-7b0883f7a0c0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.437274] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.652071] env[69982]: DEBUG oslo_vmware.api [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865786, 'name': PowerOnVM_Task, 'duration_secs': 0.451931} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.652342] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1372.652543] env[69982]: INFO nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Took 7.25 seconds to spawn the instance on the hypervisor. 
[ 1372.652723] env[69982]: DEBUG nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1372.653493] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b649fb-a634-4fbc-afca-0059652412b4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.940640] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.169692] env[69982]: INFO nova.compute.manager [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Took 12.01 seconds to build instance. [ 1373.445873] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1373.446113] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.150s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1373.674176] env[69982]: DEBUG oslo_concurrency.lockutils [None req-342a5189-cd49-442a-a95a-360d8ca9756b tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.530s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.445770] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1374.446054] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1374.805500] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.805772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.806077] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.806286] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.807062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.808803] env[69982]: INFO nova.compute.manager [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Terminating instance [ 1375.314094] env[69982]: DEBUG nova.compute.manager [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Start destroying the instance on the hypervisor. 
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1375.314318] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1375.315184] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8733dfd2-576e-48a8-8bfe-872b2f638791 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.324733] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1375.325111] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4773f0fc-3019-4776-9c48-2c171e81f4f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.333420] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1375.333420] env[69982]: value = "task-3865788" [ 1375.333420] env[69982]: _type = "Task" [ 1375.333420] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.343337] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865788, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.844475] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865788, 'name': PowerOffVM_Task, 'duration_secs': 0.219487} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.844772] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.844919] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1375.845203] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e6be2c6-39cf-4129-9966-d336b1f71ddb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.918668] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1375.918913] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1375.919038] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Deleting the datastore file [datastore1] 842ca18a-d33b-4658-9d78-e51b5de0ea68 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.919292] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-040caaa2-5087-4638-a0c8-5ddc384030cf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.927257] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for the task: (returnval){ [ 1375.927257] env[69982]: value = "task-3865790" [ 1375.927257] env[69982]: _type = "Task" [ 1375.927257] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.936763] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865790, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.437476] env[69982]: DEBUG oslo_vmware.api [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Task: {'id': task-3865790, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158227} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.437748] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1376.438014] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1376.438242] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1376.438456] env[69982]: INFO nova.compute.manager [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1376.438733] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1376.439017] env[69982]: DEBUG nova.compute.manager [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1376.439152] env[69982]: DEBUG nova.network.neutron [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1376.728440] env[69982]: DEBUG nova.compute.manager [req-afede236-afca-4a2b-92b5-3894a0a62f2a req-7fa14eb1-2e24-4a00-8a43-56abd0bc209e service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Received event network-vif-deleted-e73e1d6c-fa11-4e6c-990b-d60db96b26ec {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1376.728645] env[69982]: INFO nova.compute.manager [req-afede236-afca-4a2b-92b5-3894a0a62f2a req-7fa14eb1-2e24-4a00-8a43-56abd0bc209e service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Neutron deleted interface e73e1d6c-fa11-4e6c-990b-d60db96b26ec; detaching it from the instance and deleting it from the info cache [ 1376.728802] env[69982]: DEBUG nova.network.neutron [req-afede236-afca-4a2b-92b5-3894a0a62f2a req-7fa14eb1-2e24-4a00-8a43-56abd0bc209e service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.845772] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1376.846173] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.211106] env[69982]: DEBUG nova.network.neutron [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.231484] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21ca63b6-f5aa-41be-832c-c99a61eeadf6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.243399] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bb1072-8aa4-49e3-9253-be235a60e494 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.274022] env[69982]: DEBUG nova.compute.manager [req-afede236-afca-4a2b-92b5-3894a0a62f2a req-7fa14eb1-2e24-4a00-8a43-56abd0bc209e service nova] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Detach 
interface failed, port_id=e73e1d6c-fa11-4e6c-990b-d60db96b26ec, reason: Instance 842ca18a-d33b-4658-9d78-e51b5de0ea68 could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1377.349328] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Starting instance... {{(pid=69982) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1377.715806] env[69982]: INFO nova.compute.manager [-] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Took 1.28 seconds to deallocate network for instance. [ 1377.873634] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.874107] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.876223] env[69982]: INFO nova.compute.claims [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1378.221978] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1378.942442] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7374f4-459c-4c14-a3cf-7b5208c762c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.951021] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea402c9-f6a6-4d66-abf6-a8451bed7e32 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.980545] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dfd9a09-ec82-472a-9a4d-31fd41544247 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.988052] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecc0878-a208-49c6-a0aa-6ae4bcbb1c90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.002231] env[69982]: DEBUG nova.compute.provider_tree [None 
req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1379.505520] env[69982]: DEBUG nova.scheduler.client.report [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1380.011057] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.137s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.011568] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Start building networks asynchronously for instance. {{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1380.014315] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.793s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.015428] env[69982]: DEBUG nova.objects.instance [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lazy-loading 'resources' on Instance uuid 842ca18a-d33b-4658-9d78-e51b5de0ea68 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.518427] env[69982]: DEBUG nova.compute.utils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1380.522352] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Allocating IP information in the background. 
{{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1380.522352] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] allocate_for_instance() {{(pid=69982) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1380.574452] env[69982]: DEBUG nova.policy [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70dce0073b9a4286bf932a4a14ab41b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0881ea51765b49b1a571d0957eef4bd1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69982) authorize /opt/stack/nova/nova/policy.py:192}} [ 1380.580796] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-490b7ef2-3e1f-40c0-a97d-de120af3c1e3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.588787] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82e51520-b294-4ee6-afaa-31e5ed9f3a64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.621956] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974ecace-086b-4292-b2c9-9229f57a2159 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.630507] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46525427-5743-4cf4-b938-66da222bf3ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.644113] env[69982]: DEBUG nova.compute.provider_tree [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1380.864748] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Successfully created port: 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1381.023091] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Start building block device mappings for instance. 
{{(pid=69982) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1381.147502] env[69982]: DEBUG nova.scheduler.client.report [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1381.528643] env[69982]: INFO nova.virt.block_device [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Booting with volume 40c0390a-c463-4307-82f3-688f52cae521 at /dev/sda [ 1381.565576] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5a60a28-557b-4dda-9f92-961b623dd068 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.575700] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2537b95-133a-46bf-b57b-2ce9b888e9a2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.601754] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe62b0a7-da4a-46f4-939c-ebe060fcdf5b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.611396] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa622b7-76d5-4213-be2a-0de4ca250d30 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.637881] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4316a642-6ad5-407a-82d9-815b7eb2ca64 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.644800] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4a7b87-2404-4224-9893-efe1f44c028d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.653119] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1381.658949] env[69982]: DEBUG nova.virt.block_device [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating existing volume attachment record: 
951a8247-9452-41d9-b706-1e98dd1cbad4 {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1381.673749] env[69982]: INFO nova.scheduler.client.report [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Deleted allocations for instance 842ca18a-d33b-4658-9d78-e51b5de0ea68 [ 1382.182695] env[69982]: DEBUG oslo_concurrency.lockutils [None req-4899d232-3224-466f-b983-4775fdd9ff7f tempest-ServersNegativeTestMultiTenantJSON-2140176665 tempest-ServersNegativeTestMultiTenantJSON-2140176665-project-member] Lock "842ca18a-d33b-4658-9d78-e51b5de0ea68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.377s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.265952] env[69982]: DEBUG nova.compute.manager [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Received event network-vif-plugged-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1382.266234] env[69982]: DEBUG oslo_concurrency.lockutils [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] Acquiring lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.266444] env[69982]: DEBUG oslo_concurrency.lockutils [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.266601] env[69982]: DEBUG oslo_concurrency.lockutils [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.266762] env[69982]: DEBUG nova.compute.manager [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] No waiting events found dispatching network-vif-plugged-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1382.266929] env[69982]: WARNING nova.compute.manager [req-c55a15dd-bac2-4f6b-a352-3cf97bc48889 req-c889e74a-9a37-4eb5-ba5c-37eaa673159f service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Received unexpected event network-vif-plugged-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d for instance with vm_state building and task_state block_device_mapping. 
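The recurring Acquiring lock / acquired / released DEBUG triples (lockutils.py:405, 410, 424, and the 313/316/334 variants for the refresh_cache lock) are emitted by oslo.concurrency's lockutils wrappers around these compute-manager and resource-tracker methods. A minimal sketch of that primitive under an assumed placeholder lock name, not one taken from this log:

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped body runs only while the named lock is held;
    # entering and leaving it is what emits the "acquired ... waited Ns" and
    # "released ... held Ns" DEBUG lines seen above.
    @lockutils.synchronized("example-lock")
    def critical_section():
        pass

    # Equivalent context-manager form, as used for the refresh_cache-* locks.
    def do_work():
        with lockutils.lock("example-lock"):
            pass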
[ 1382.353708] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Successfully updated port: 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1382.858720] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.858899] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquired lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1382.859065] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1383.394148] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance cache missing network info. 
{{(pid=69982) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3404}} [ 1383.523497] env[69982]: DEBUG nova.network.neutron [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating instance_info_cache with network_info: [{"id": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "address": "fa:16:3e:cb:47:de", "network": {"id": "8c0dc046-4103-411a-92fd-8c9854c550dd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-261855695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0881ea51765b49b1a571d0957eef4bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb08d3e-9e", "ovs_interfaceid": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.649239] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.649484] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.748012] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Start spawning the instance on the hypervisor. 
{{(pid=69982) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1383.748568] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1383.748777] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.748930] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1383.749122] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.749267] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1383.749414] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1383.749620] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1383.749772] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1383.749934] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 
tempest-ServerActionsV293TestJSON-1625655290-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1383.750109] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1383.750284] env[69982]: DEBUG nova.virt.hardware [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1383.751189] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d16066d-940d-46b8-a8ac-b0b255163355 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.760257] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8ee431-07bf-4f35-890f-cd74527fb6ef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.025888] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Releasing lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1384.026307] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance network_info: |[{"id": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "address": "fa:16:3e:cb:47:de", "network": {"id": "8c0dc046-4103-411a-92fd-8c9854c550dd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-261855695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0881ea51765b49b1a571d0957eef4bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb08d3e-9e", "ovs_interfaceid": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69982) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1384.026740] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 
tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:47:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1384.034472] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Creating folder: Project (0881ea51765b49b1a571d0957eef4bd1). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1384.034768] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2bec9b16-bd90-48bd-8de8-97c096aff459 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.048544] env[69982]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1384.048716] env[69982]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69982) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1384.049111] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Folder already exists: Project (0881ea51765b49b1a571d0957eef4bd1). Parent ref: group-v767796. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1384.049329] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Creating folder: Instances. Parent ref: group-v768132. {{(pid=69982) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1384.049580] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-18eefbb4-00d6-4ff5-9459-303a466fde43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.060348] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Created folder: Instances in parent group-v768132. [ 1384.060622] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1384.060819] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1384.061393] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f133bbb-2982-4c0e-b7c4-2700ec71da86 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.081551] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1384.081551] env[69982]: value = "task-3865793" [ 1384.081551] env[69982]: _type = "Task" [ 1384.081551] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.090252] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865793, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.152823] env[69982]: DEBUG nova.compute.utils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1384.299459] env[69982]: DEBUG nova.compute.manager [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Received event network-changed-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1384.299680] env[69982]: DEBUG nova.compute.manager [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Refreshing instance network info cache due to event network-changed-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1384.299894] env[69982]: DEBUG oslo_concurrency.lockutils [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] Acquiring lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.300048] env[69982]: DEBUG oslo_concurrency.lockutils [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] Acquired lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1384.300608] env[69982]: DEBUG nova.network.neutron [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Refreshing network info cache for port 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1384.594531] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865793, 'name': CreateVM_Task, 'duration_secs': 0.298849} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.594531] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.598738] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'mount_device': '/dev/sda', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768135', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'name': 'volume-40c0390a-c463-4307-82f3-688f52cae521', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '902f02d7-6852-46fc-ace9-8abb0f36ee9e', 'attached_at': '', 'detached_at': '', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'serial': '40c0390a-c463-4307-82f3-688f52cae521'}, 'guest_format': None, 'attachment_id': '951a8247-9452-41d9-b706-1e98dd1cbad4', 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69982) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1384.599054] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Root volume attach. Driver type: vmdk {{(pid=69982) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1384.599952] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aec9ec9-5aaa-4184-82b0-190ce0096316 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.612470] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fffd644-9696-4a0a-9611-6b9585fcb510 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.619204] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b44872-a4b3-4b1f-9d3a-8cd387e5d66f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.626035] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4bf33060-cd4d-4f0f-8601-aa8ec5517a00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.634818] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1384.634818] env[69982]: value = "task-3865794" [ 1384.634818] env[69982]: _type = "Task" [ 1384.634818] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.643545] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865794, 'name': RelocateVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.655346] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.066176] env[69982]: DEBUG nova.network.neutron [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updated VIF entry in instance network info cache for port 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d. {{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1385.066601] env[69982]: DEBUG nova.network.neutron [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating instance_info_cache with network_info: [{"id": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "address": "fa:16:3e:cb:47:de", "network": {"id": "8c0dc046-4103-411a-92fd-8c9854c550dd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-261855695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0881ea51765b49b1a571d0957eef4bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb08d3e-9e", "ovs_interfaceid": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.148332] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865794, 'name': RelocateVM_Task, 'duration_secs': 0.366362} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.148747] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1385.149034] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768135', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'name': 'volume-40c0390a-c463-4307-82f3-688f52cae521', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '902f02d7-6852-46fc-ace9-8abb0f36ee9e', 'attached_at': '', 'detached_at': '', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'serial': '40c0390a-c463-4307-82f3-688f52cae521'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1385.150185] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572a0ddc-8a75-41ab-87c7-13dad2e1cdf2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.174569] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50648350-f848-4218-afa8-51913752d9dc {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.197428] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] volume-40c0390a-c463-4307-82f3-688f52cae521/volume-40c0390a-c463-4307-82f3-688f52cae521.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1385.197751] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7105dea6-d4eb-439e-80af-e4b95384ec1e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.223419] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1385.223419] env[69982]: value = "task-3865795" [ 1385.223419] env[69982]: _type = "Task" [ 1385.223419] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.233768] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865795, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.570193] env[69982]: DEBUG oslo_concurrency.lockutils [req-ce5de34c-e304-45b9-8b27-16443374e3a3 req-c1645fdd-0118-4235-9a5c-c0eafbd88f29 service nova] Releasing lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1385.725510] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.725852] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.726031] env[69982]: INFO nova.compute.manager [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attaching volume 536ec489-9226-4deb-8ba0-fc538012c1cc to /dev/sdb [ 1385.737558] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865795, 'name': ReconfigVM_Task, 'duration_secs': 0.253315} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.737825] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Reconfigured VM instance instance-0000007f to attach disk [datastore1] volume-40c0390a-c463-4307-82f3-688f52cae521/volume-40c0390a-c463-4307-82f3-688f52cae521.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1385.742659] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9400f6f4-1169-4bf7-8d41-095b28adf202 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.759335] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1385.759335] env[69982]: value = "task-3865796" [ 1385.759335] env[69982]: _type = "Task" [ 1385.759335] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.764237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9f73e24-c2a8-403c-b7d1-a85d0a62ab90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.775905] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7c8c55-b7b1-4b6f-a7f6-9feab4bf6ad1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.778579] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865796, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.790754] env[69982]: DEBUG nova.virt.block_device [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating existing volume attachment record: 05e1a89d-6ae5-4509-ab0a-a10c909d088c {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1386.270176] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865796, 'name': ReconfigVM_Task, 'duration_secs': 0.156} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.270519] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768135', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'name': 'volume-40c0390a-c463-4307-82f3-688f52cae521', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '902f02d7-6852-46fc-ace9-8abb0f36ee9e', 'attached_at': '', 'detached_at': '', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'serial': '40c0390a-c463-4307-82f3-688f52cae521'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1386.271123] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e17cb71-4db8-4150-b069-7b8c88a69099 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.278774] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1386.278774] env[69982]: value = "task-3865798" [ 1386.278774] env[69982]: _type = "Task" [ 1386.278774] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.287321] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865798, 'name': Rename_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.789067] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865798, 'name': Rename_Task, 'duration_secs': 0.136115} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.789591] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1386.789685] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c514667e-c2f2-4b9d-a9f0-56f2ca3bad14 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.797455] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1386.797455] env[69982]: value = "task-3865799" [ 1386.797455] env[69982]: _type = "Task" [ 1386.797455] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.806145] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.308523] env[69982]: DEBUG oslo_vmware.api [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865799, 'name': PowerOnVM_Task, 'duration_secs': 0.425133} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.308806] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1387.309009] env[69982]: INFO nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Took 3.56 seconds to spawn the instance on the hypervisor. 
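[editor's note] The block above traces one boot-from-volume spawn end to end: CreateVM_Task (task-3865793), RelocateVM_Task for the root volume attach (task-3865794), two ReconfigVM_Task calls to wire up the VMDK (task-3865795/3865796), Rename_Task (task-3865798) and PowerOnVM_Task (task-3865799), each driven by the same "Waiting for the task ... / progress is N% / completed successfully" loop. The sketch below is a minimal, hedged illustration of that poll loop; the real implementation is oslo_vmware.api.VMwareAPISession.wait_for_task()/_poll_task(), and get_task_info() here is a hypothetical stand-in for reading TaskInfo via the vSphere PropertyCollector.

```python
# Minimal sketch of the poll-until-done pattern behind the task entries above.
# Not the oslo.vmware implementation; get_task_info() is a hypothetical helper.
import time


class TaskFailed(Exception):
    """Raised when the vCenter task ends in the 'error' state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vCenter task reference until it succeeds or fails."""
    while True:
        # Expected to return an object with .state, .progress, .error, .result,
        # mirroring the vSphere TaskInfo data object.
        info = get_task_info(task_ref)
        if info.state == 'success':
            # e.g. the created VirtualMachine moref for CreateVM_Task
            return info.result
        if info.state == 'error':
            raise TaskFailed(getattr(info.error, 'localizedMessage', 'task failed'))
        # 'queued' or 'running': report progress and retry, which corresponds to
        # the repeated "Task: {...} progress is N%" DEBUG lines in the log.
        print(f"Task {task_ref}: {info.state}, progress {info.progress or 0}%")
        time.sleep(poll_interval)
```

In the driver this loop runs once per vCenter task of the spawn; when the final PowerOnVM_Task completes, the compute manager records the "Took N seconds to spawn the instance" line seen above.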
[ 1387.309198] env[69982]: DEBUG nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1387.309965] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9605ad9e-9e5e-4368-bc99-9d5e5c28afa1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.827757] env[69982]: INFO nova.compute.manager [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Took 9.97 seconds to build instance. [ 1388.329727] env[69982]: DEBUG oslo_concurrency.lockutils [None req-1a273bad-6efc-4a7c-8030-d985640e795b tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.483s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1388.917300] env[69982]: DEBUG nova.compute.manager [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Received event network-changed-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1388.917610] env[69982]: DEBUG nova.compute.manager [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Refreshing instance network info cache due to event network-changed-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d. {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1388.917695] env[69982]: DEBUG oslo_concurrency.lockutils [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] Acquiring lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1388.917871] env[69982]: DEBUG oslo_concurrency.lockutils [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] Acquired lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1388.918010] env[69982]: DEBUG nova.network.neutron [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Refreshing network info cache for port 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1389.619835] env[69982]: DEBUG nova.network.neutron [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updated VIF entry in instance network info cache for port 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1389.620261] env[69982]: DEBUG nova.network.neutron [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating instance_info_cache with network_info: [{"id": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "address": "fa:16:3e:cb:47:de", "network": {"id": "8c0dc046-4103-411a-92fd-8c9854c550dd", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-261855695-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0881ea51765b49b1a571d0957eef4bd1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1fb08d3e-9e", "ovs_interfaceid": "1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.123099] env[69982]: DEBUG oslo_concurrency.lockutils [req-b514b1f3-65fb-4951-b2ec-c00d50a426a6 req-0f8c2ebb-5b8f-46e2-a826-fb9eeecece4d service nova] Releasing lock "refresh_cache-902f02d7-6852-46fc-ace9-8abb0f36ee9e" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1390.336107] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Volume attach. 
Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1390.336380] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768141', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'name': 'volume-536ec489-9226-4deb-8ba0-fc538012c1cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'serial': '536ec489-9226-4deb-8ba0-fc538012c1cc'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1390.337355] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea8e416-a1a8-40af-b563-154ced2cb1d8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.356161] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718bde19-972b-44cc-8bd5-5a0b7977ce69 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.382711] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-536ec489-9226-4deb-8ba0-fc538012c1cc/volume-536ec489-9226-4deb-8ba0-fc538012c1cc.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.383024] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-492f8fb7-cae6-44b7-9846-2d928f1ab758 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.402359] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1390.402359] env[69982]: value = "task-3865801" [ 1390.402359] env[69982]: _type = "Task" [ 1390.402359] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.410966] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.912969] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865801, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.413907] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865801, 'name': ReconfigVM_Task, 'duration_secs': 0.592947} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.414289] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-536ec489-9226-4deb-8ba0-fc538012c1cc/volume-536ec489-9226-4deb-8ba0-fc538012c1cc.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.418986] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a86467e2-6a51-49aa-84f1-8559b00fc133 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.434338] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1391.434338] env[69982]: value = "task-3865802" [ 1391.434338] env[69982]: _type = "Task" [ 1391.434338] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.443197] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865802, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.944079] env[69982]: DEBUG oslo_vmware.api [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865802, 'name': ReconfigVM_Task, 'duration_secs': 0.288824} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.944404] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768141', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'name': 'volume-536ec489-9226-4deb-8ba0-fc538012c1cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'serial': '536ec489-9226-4deb-8ba0-fc538012c1cc'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1392.981240] env[69982]: DEBUG nova.objects.instance [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1393.486649] env[69982]: DEBUG oslo_concurrency.lockutils [None req-7d0ade5b-afcf-4bef-81a9-30835e309352 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.761s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1394.853625] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1394.853920] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.357782] env[69982]: DEBUG nova.compute.utils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Using /dev/sd instead of None {{(pid=69982) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1395.861567] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1396.919253] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 
tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1396.919605] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1396.919790] env[69982]: INFO nova.compute.manager [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attaching volume a128b9dc-49a0-4013-913f-760a414c305a to /dev/sdc [ 1396.954636] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa14a19-d290-4e51-85d3-4c11570314f9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.962614] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118da275-ec4a-4527-9e8d-016c581cc114 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.976576] env[69982]: DEBUG nova.virt.block_device [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating existing volume attachment record: 56e15145-e158-4166-b384-b2267eb8719c {{(pid=69982) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1397.034637] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1397.034935] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1397.035168] env[69982]: INFO nova.compute.manager [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Shelving [ 1398.044916] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powering off the VM {{(pid=69982) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1398.045309] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49ae6388-67cd-4cc3-89ad-def47302b071 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.055314] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1398.055314] env[69982]: value = "task-3865804" [ 1398.055314] env[69982]: _type = "Task" [ 1398.055314] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.064056] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865804, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.565076] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865804, 'name': PowerOffVM_Task, 'duration_secs': 0.207443} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.565341] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1398.566179] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2332b7-85bc-488f-80dd-1da3f146632d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.584876] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814ed229-b726-49b1-a4a2-1c792d4a6a77 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.095202] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Creating Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1399.095540] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9b537172-c0ec-478c-8e98-d9cf221dbaef {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.102818] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1399.102818] env[69982]: value = "task-3865805" [ 1399.102818] env[69982]: _type = "Task" [ 1399.102818] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.111507] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865805, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.615080] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865805, 'name': CreateSnapshot_Task, 'duration_secs': 0.426809} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.615547] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Created Snapshot of the VM instance {{(pid=69982) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1399.616568] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be89da8d-ceb6-4b8b-bb00-f1f7c05e7316 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.135041] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Creating linked-clone VM from snapshot {{(pid=69982) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1400.135387] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1b11f266-75a8-4ad2-9523-d125ed434603 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.143944] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1400.143944] env[69982]: value = "task-3865807" [ 1400.143944] env[69982]: _type = "Task" [ 1400.143944] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.152665] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865807, 'name': CloneVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.654287] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865807, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.157581] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865807, 'name': CloneVM_Task} progress is 95%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.523103] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Volume attach. Driver type: vmdk {{(pid=69982) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1401.523421] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768142', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'name': 'volume-a128b9dc-49a0-4013-913f-760a414c305a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'serial': 'a128b9dc-49a0-4013-913f-760a414c305a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1401.524368] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25509e5-c20d-4b67-b089-41a97bff2e08 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.541200] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f248be18-c585-4db7-a3a2-ecf761193322 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.568877] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-a128b9dc-49a0-4013-913f-760a414c305a/volume-a128b9dc-49a0-4013-913f-760a414c305a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1401.569198] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-561fa9d5-e5dd-4b2b-9d60-c3ba011e8784 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.587182] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1401.587182] env[69982]: value = "task-3865808" [ 1401.587182] env[69982]: _type = "Task" [ 1401.587182] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.595170] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865808, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.656145] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865807, 'name': CloneVM_Task, 'duration_secs': 1.186437} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.656462] env[69982]: INFO nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Created linked-clone VM from snapshot [ 1401.657553] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dda3f80-70be-41f1-8cde-e9336451a688 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.666100] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Uploading image 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1401.690872] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1401.690872] env[69982]: value = "vm-768144" [ 1401.690872] env[69982]: _type = "VirtualMachine" [ 1401.690872] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1401.691212] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1ba2538b-8d02-496c-bf26-13b7ad4966d3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.699296] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease: (returnval){ [ 1401.699296] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fa7ce-db10-501f-f60c-2113160cabad" [ 1401.699296] env[69982]: _type = "HttpNfcLease" [ 1401.699296] env[69982]: } obtained for exporting VM: (result){ [ 1401.699296] env[69982]: value = "vm-768144" [ 1401.699296] env[69982]: _type = "VirtualMachine" [ 1401.699296] env[69982]: }. 
{{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1401.699593] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the lease: (returnval){ [ 1401.699593] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fa7ce-db10-501f-f60c-2113160cabad" [ 1401.699593] env[69982]: _type = "HttpNfcLease" [ 1401.699593] env[69982]: } to be ready. {{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1401.709070] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1401.709070] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fa7ce-db10-501f-f60c-2113160cabad" [ 1401.709070] env[69982]: _type = "HttpNfcLease" [ 1401.709070] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1402.097391] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865808, 'name': ReconfigVM_Task, 'duration_secs': 0.350013} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.097730] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-a128b9dc-49a0-4013-913f-760a414c305a/volume-a128b9dc-49a0-4013-913f-760a414c305a.vmdk or device None with type thin {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1402.102470] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc14022e-50f9-4209-b044-4e0a87007349 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.117731] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1402.117731] env[69982]: value = "task-3865810" [ 1402.117731] env[69982]: _type = "Task" [ 1402.117731] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.126078] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865810, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.208114] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1402.208114] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fa7ce-db10-501f-f60c-2113160cabad" [ 1402.208114] env[69982]: _type = "HttpNfcLease" [ 1402.208114] env[69982]: } is ready. 
{{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1402.208655] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1402.208655] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]524fa7ce-db10-501f-f60c-2113160cabad" [ 1402.208655] env[69982]: _type = "HttpNfcLease" [ 1402.208655] env[69982]: }. {{(pid=69982) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1402.209296] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bad0b3c-374e-48dd-8dc8-a8a7878f2bb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.217333] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1402.217447] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk for reading. {{(pid=69982) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1402.307782] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-27e527af-860a-4fc1-9e80-c82f6eae4730 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.629283] env[69982]: DEBUG oslo_vmware.api [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865810, 'name': ReconfigVM_Task, 'duration_secs': 0.148005} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.629832] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768142', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'name': 'volume-a128b9dc-49a0-4013-913f-760a414c305a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'serial': 'a128b9dc-49a0-4013-913f-760a414c305a'} {{(pid=69982) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1403.670799] env[69982]: DEBUG nova.objects.instance [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1404.177146] env[69982]: DEBUG oslo_concurrency.lockutils [None req-a4925482-496c-43c6-8a49-7a49fbc86f60 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.257s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1404.449448] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1404.450096] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.953024] env[69982]: INFO nova.compute.manager [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Detaching volume 536ec489-9226-4deb-8ba0-fc538012c1cc [ 1404.989853] env[69982]: INFO nova.virt.block_device [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attempting to driver detach volume 536ec489-9226-4deb-8ba0-fc538012c1cc from mountpoint /dev/sdb [ 1404.990105] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 
8f0dacd5-59ec-495e-bac7-df2b76883562] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1404.990308] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768141', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'name': 'volume-536ec489-9226-4deb-8ba0-fc538012c1cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'serial': '536ec489-9226-4deb-8ba0-fc538012c1cc'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1404.991246] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2373e013-9e04-44f7-ae67-4bb8bca2c374 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.019653] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76fd4fd8-d03b-4a43-aa51-4f4b2aedfc58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.027765] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6008ad97-f15f-45e1-935e-5cd0fc1f32bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.051789] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205f1a69-aa22-412c-bf79-9afe435af3ab {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.068019] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] The volume has not been displaced from its original location: [datastore2] volume-536ec489-9226-4deb-8ba0-fc538012c1cc/volume-536ec489-9226-4deb-8ba0-fc538012c1cc.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1405.073458] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1405.073797] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-850aa1de-a1f3-412f-88d3-dacc0ee2e5c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.093618] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1405.093618] env[69982]: value = "task-3865811" [ 1405.093618] env[69982]: _type = "Task" [ 1405.093618] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.102540] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865811, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.603479] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865811, 'name': ReconfigVM_Task, 'duration_secs': 0.273339} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.603774] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1405.608588] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01b3897b-e114-4ea3-966c-ef8ad0a99e1f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.624385] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1405.624385] env[69982]: value = "task-3865812" [ 1405.624385] env[69982]: _type = "Task" [ 1405.624385] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.633285] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865812, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.134760] env[69982]: DEBUG oslo_vmware.api [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865812, 'name': ReconfigVM_Task, 'duration_secs': 0.133169} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.135168] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768141', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'name': 'volume-536ec489-9226-4deb-8ba0-fc538012c1cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': '536ec489-9226-4deb-8ba0-fc538012c1cc', 'serial': '536ec489-9226-4deb-8ba0-fc538012c1cc'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1406.681945] env[69982]: DEBUG nova.objects.instance [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1407.689549] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d8764492-209f-42f7-8bd1-eb58e61017b3 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.240s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1407.706824] env[69982]: DEBUG oslo_concurrency.lockutils [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1407.707143] env[69982]: DEBUG oslo_concurrency.lockutils [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1408.210788] env[69982]: INFO nova.compute.manager [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Detaching volume a128b9dc-49a0-4013-913f-760a414c305a [ 1408.245287] env[69982]: INFO nova.virt.block_device [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 
tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Attempting to driver detach volume a128b9dc-49a0-4013-913f-760a414c305a from mountpoint /dev/sdc [ 1408.245702] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1408.245961] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768142', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'name': 'volume-a128b9dc-49a0-4013-913f-760a414c305a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'serial': 'a128b9dc-49a0-4013-913f-760a414c305a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1408.246982] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb04415-6194-4a8c-8673-d13b63d66174 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.270300] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076e283b-4ca2-4c28-9948-541e72286125 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.277796] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cdc9aa-9157-4a5b-9946-528996eccce4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.298415] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41cb5657-7c2a-4b85-9cbb-a54c5bdc91a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.314031] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] The volume has not been displaced from its original location: [datastore2] volume-a128b9dc-49a0-4013-913f-760a414c305a/volume-a128b9dc-49a0-4013-913f-760a414c305a.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1408.319867] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfiguring VM instance instance-0000007c to detach disk 2002 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1408.320341] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ee3083f-787d-4c74-ba38-594287a77df4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.339717] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1408.339717] env[69982]: value = "task-3865813" [ 1408.339717] env[69982]: _type = "Task" [ 1408.339717] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.348567] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865813, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.850783] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865813, 'name': ReconfigVM_Task, 'duration_secs': 0.241368} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.851131] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Reconfigured VM instance instance-0000007c to detach disk 2002 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1408.855937] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63324735-316d-43cd-b9c3-a4a26a6cea22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.871912] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1408.871912] env[69982]: value = "task-3865814" [ 1408.871912] env[69982]: _type = "Task" [ 1408.871912] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.884931] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865814, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.382039] env[69982]: DEBUG oslo_vmware.api [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865814, 'name': ReconfigVM_Task, 'duration_secs': 0.165257} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.382381] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768142', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'name': 'volume-a128b9dc-49a0-4013-913f-760a414c305a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8f0dacd5-59ec-495e-bac7-df2b76883562', 'attached_at': '', 'detached_at': '', 'volume_id': 'a128b9dc-49a0-4013-913f-760a414c305a', 'serial': 'a128b9dc-49a0-4013-913f-760a414c305a'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1409.929334] env[69982]: DEBUG nova.objects.instance [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'flavor' on Instance uuid 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1409.971273] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1409.972240] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d5a56b-022d-45d9-9200-c54cdba705ed {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.981339] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1409.981486] env[69982]: ERROR oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk due to incomplete transfer. 
[ 1409.981721] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5ee33756-c700-4ada-8507-d706c806f37d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.990986] env[69982]: DEBUG oslo_vmware.rw_handles [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521be261-fc1f-d449-05e0-8520c02752d7/disk-0.vmdk. {{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1409.991206] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Uploaded image 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 to the Glance image server {{(pid=69982) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1409.993666] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Destroying the VM {{(pid=69982) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1409.993920] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1726edd2-b562-495e-9ad2-c3a28f73ef1a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.000735] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1410.000735] env[69982]: value = "task-3865815" [ 1410.000735] env[69982]: _type = "Task" [ 1410.000735] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.009355] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865815, 'name': Destroy_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.511678] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865815, 'name': Destroy_Task, 'duration_secs': 0.327972} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.511953] env[69982]: INFO nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Destroyed the VM [ 1410.512252] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleting Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1410.512520] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-16bbda09-fa0d-481f-b900-ce92e4bf9d53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.519957] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1410.519957] env[69982]: value = "task-3865816" [ 1410.519957] env[69982]: _type = "Task" [ 1410.519957] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.527893] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865816, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.937049] env[69982]: DEBUG oslo_concurrency.lockutils [None req-437943e0-0db5-4c3b-a9f6-cf1a8ac5da8d tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.230s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.030497] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865816, 'name': RemoveSnapshot_Task, 'duration_secs': 0.388538} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.030754] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleted Snapshot of the VM instance {{(pid=69982) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1411.031037] env[69982]: DEBUG nova.compute.manager [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1411.031808] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e37faf-2f3c-4c7d-987c-82dd12082382 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.545036] env[69982]: INFO nova.compute.manager [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Shelve offloading [ 1412.049059] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.049449] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b989481-2680-4064-a257-728a84b39725 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.056864] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1412.056864] env[69982]: value = "task-3865817" [ 1412.056864] env[69982]: _type = "Task" [ 1412.056864] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.064706] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865817, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.104303] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.104552] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.104767] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1412.104947] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.105142] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.107274] env[69982]: INFO nova.compute.manager [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Terminating instance [ 1412.568248] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1412.568491] env[69982]: DEBUG nova.compute.manager [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1412.569299] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31e953a9-939f-417e-9bde-7ff55a994c5f 
{{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.575616] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.575781] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1412.575954] env[69982]: DEBUG nova.network.neutron [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1412.611035] env[69982]: DEBUG nova.compute.manager [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1412.611187] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1412.612031] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc4eaaf-cca4-411b-8b8b-be8c4b1aebaf {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.619788] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.620038] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7de29f1-a720-419a-8745-7669f69cf2e6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.625760] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1412.625760] env[69982]: value = "task-3865818" [ 1412.625760] env[69982]: _type = "Task" [ 1412.625760] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.635965] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865818, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.135486] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865818, 'name': PowerOffVM_Task, 'duration_secs': 0.249416} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.135799] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.137040] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.137040] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22b35180-46a2-4c56-b203-c5fcc3d590bb {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.227540] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1413.227812] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1413.228057] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleting the datastore file [datastore1] 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.228409] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96c64e86-8dea-4cc4-ba58-e8976afe7b53 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.236119] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for the task: (returnval){ [ 1413.236119] env[69982]: value = "task-3865820" [ 1413.236119] env[69982]: _type = "Task" [ 1413.236119] 
env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.247100] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865820, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.288607] env[69982]: DEBUG nova.network.neutron [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.746969] env[69982]: DEBUG oslo_vmware.api [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Task: {'id': task-3865820, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131206} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.747267] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1413.747456] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1413.747639] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1413.747818] env[69982]: INFO nova.compute.manager [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1413.748078] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1413.748280] env[69982]: DEBUG nova.compute.manager [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1413.748409] env[69982]: DEBUG nova.network.neutron [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1413.791014] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1414.169283] env[69982]: DEBUG nova.compute.manager [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-vif-unplugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1414.169549] env[69982]: DEBUG oslo_concurrency.lockutils [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1414.169835] env[69982]: DEBUG oslo_concurrency.lockutils [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1414.169898] env[69982]: DEBUG oslo_concurrency.lockutils [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.170032] env[69982]: DEBUG nova.compute.manager [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] No waiting events found dispatching network-vif-unplugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1414.170204] env[69982]: WARNING nova.compute.manager [req-09f9a245-85e5-4975-85c1-156592c3a869 req-adf0cf2b-09ef-4b88-90c2-d3a7e5459d4a service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received unexpected event network-vif-unplugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d for instance with vm_state shelved and task_state shelving_offloading. 
[ 1414.332366] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1414.333485] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0191fac3-57df-40c8-b3d6-d5988a3ab500 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.341341] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1414.342038] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eae7ead-32a6-4da8-aef2-1b2eb8e8ae90 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.537706] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1414.537900] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1414.538077] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore1] 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1414.538367] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e8ca774-3927-4c44-9c08-9787c5210ae3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.547108] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1414.547108] env[69982]: value = "task-3865822" [ 1414.547108] env[69982]: _type = "Task" [ 1414.547108] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.556360] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.911300] env[69982]: DEBUG nova.network.neutron [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.058833] env[69982]: DEBUG oslo_vmware.api [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186919} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.058833] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1415.058833] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1415.058979] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1415.084261] env[69982]: INFO nova.scheduler.client.report [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted allocations for instance 86b67d38-95b6-46cf-bf7c-524a06773c0c [ 1415.413897] env[69982]: INFO nova.compute.manager [-] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Took 1.67 seconds to deallocate network for instance. 
[ 1415.589670] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.589670] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.589852] env[69982]: DEBUG nova.objects.instance [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'resources' on Instance uuid 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1415.920893] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1416.093114] env[69982]: DEBUG nova.objects.instance [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'numa_topology' on Instance uuid 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1416.203417] env[69982]: DEBUG nova.compute.manager [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1416.203628] env[69982]: DEBUG nova.compute.manager [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing instance network info cache due to event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1416.203840] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.204082] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.204216] env[69982]: DEBUG nova.network.neutron [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1416.596649] env[69982]: DEBUG nova.objects.base [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Object Instance<86b67d38-95b6-46cf-bf7c-524a06773c0c> lazy-loaded attributes: resources,numa_topology {{(pid=69982) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1416.645032] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43893734-661f-4fac-ac7b-3c293c298278 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.653557] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d44b538-83c6-4921-ac6b-514620b06e31 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.686455] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea828347-f74e-4816-81d7-8eb4efa8c9c4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.694733] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bc7745-4636-44cd-bc19-a0901f2ca760 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.711898] env[69982]: DEBUG nova.compute.provider_tree [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1416.923900] env[69982]: DEBUG nova.network.neutron [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updated VIF entry in instance network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1416.924327] env[69982]: DEBUG nova.network.neutron [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap7e64b942-be", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.214730] env[69982]: DEBUG nova.scheduler.client.report [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1417.430160] env[69982]: DEBUG oslo_concurrency.lockutils [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1417.430440] env[69982]: DEBUG nova.compute.manager [req-b1b4147a-7bd6-4203-9c97-ed3c9468d722 req-4345a519-4c07-4be1-96a2-561b520821f5 service nova] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Received event network-vif-deleted-6c285c0e-e718-4241-ac88-8de36e44a62f {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1417.720032] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.130s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1417.722542] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.802s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1417.722768] env[69982]: DEBUG nova.objects.instance [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lazy-loading 'resources' on Instance uuid 8f0dacd5-59ec-495e-bac7-df2b76883562 {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1417.801118] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1418.232429] env[69982]: DEBUG oslo_concurrency.lockutils [None req-011d3504-fc37-4b1a-9826-2089550ab85b tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.197s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1418.234104] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.433s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1418.234395] env[69982]: INFO nova.compute.manager [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Unshelving [ 1418.286251] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5ab447-6ded-4248-bc10-19283654519e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.295408] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e55b627-1196-45f9-8bb5-20609e5dc4e5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.326145] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf10df2-776d-43d4-8db9-15be629f423c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.333852] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be26fea9-f410-4a94-82d1-f55471669379 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.347290] env[69982]: DEBUG nova.compute.provider_tree [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed 
in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1418.850840] env[69982]: DEBUG nova.scheduler.client.report [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1419.256901] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1419.355521] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1419.358113] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.101s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.358350] env[69982]: DEBUG nova.objects.instance [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'pci_requests' on Instance uuid 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1419.375766] env[69982]: INFO nova.scheduler.client.report [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Deleted allocations for instance 8f0dacd5-59ec-495e-bac7-df2b76883562 [ 1419.862710] env[69982]: DEBUG nova.objects.instance [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'numa_topology' on Instance uuid 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1419.884183] env[69982]: DEBUG oslo_concurrency.lockutils [None req-94fad1bd-ef1b-4a70-a076-2a8002729ca9 tempest-AttachVolumeTestJSON-1976663256 tempest-AttachVolumeTestJSON-1976663256-project-member] Lock "8f0dacd5-59ec-495e-bac7-df2b76883562" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.779s 
{{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.365397] env[69982]: INFO nova.compute.claims [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1421.417391] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7e3362-609b-44b1-a42a-9eddc2f44bf1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.425192] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b82db5-7d20-45d9-888f-8ef81b368575 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.456237] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873a5307-0bef-4a4f-8503-6b9d3d83ca79 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.464215] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754d0059-f9fa-4a56-8b9e-21bf7ea640ea {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.479023] env[69982]: DEBUG nova.compute.provider_tree [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1421.981973] env[69982]: DEBUG nova.scheduler.client.report [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1422.487326] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.129s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.515418] env[69982]: INFO nova.network.neutron [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating port 7e64b942-be80-4f11-8eaa-9e4f6219c94d with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1423.886095] env[69982]: DEBUG 
nova.compute.manager [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1423.886351] env[69982]: DEBUG oslo_concurrency.lockutils [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1423.886559] env[69982]: DEBUG oslo_concurrency.lockutils [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1423.886872] env[69982]: DEBUG oslo_concurrency.lockutils [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1423.887236] env[69982]: DEBUG nova.compute.manager [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] No waiting events found dispatching network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1423.887577] env[69982]: WARNING nova.compute.manager [req-55171960-06b7-46d1-99b8-1b3aca335315 req-7d81f258-48fd-4150-a09e-3fea1fc2b983 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received unexpected event network-vif-plugged-7e64b942-be80-4f11-8eaa-9e4f6219c94d for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1423.977555] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.977861] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1423.978143] env[69982]: DEBUG nova.network.neutron [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Building network info cache for instance {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2074}} [ 1424.287053] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.719563] env[69982]: DEBUG nova.network.neutron [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.222718] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.246735] env[69982]: DEBUG nova.virt.hardware 
[None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-05-07T07:03:17Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='0dfc99c1632f5837c19c2bdca4718639',container_format='bare',created_at=2025-05-07T07:17:49Z,direct_url=,disk_format='vmdk',id=5ad07c95-ab1e-4c9f-9a71-09dea5249e71,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-594698024-shelved',owner='7372e00e1966430da6131e02f199ba14',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-05-07T07:18:02Z,virtual_size=,visibility=), allow threads: False {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1425.246984] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1425.247156] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image limits 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1425.247348] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Flavor pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1425.247550] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Image pref 0:0:0 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1425.247706] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69982) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1425.247917] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1425.248088] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1425.248263] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 
tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Got 1 possible topologies {{(pid=69982) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1425.248425] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1425.248625] env[69982]: DEBUG nova.virt.hardware [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69982) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1425.249804] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f11344-4c04-47d7-aef2-e4fffdae8977 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.258316] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7aba52-f5ad-499e-9095-27b17648d146 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.272046] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:20:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e64b942-be80-4f11-8eaa-9e4f6219c94d', 'vif_model': 'vmxnet3'}] {{(pid=69982) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1425.279504] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1425.279748] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Creating VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1425.279957] env[69982]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2566092-87ee-45c5-9b5e-f4fbe2d1bc0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.293893] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1425.299763] env[69982]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1425.299763] env[69982]: value = "task-3865825" [ 1425.299763] env[69982]: _type = "Task" [ 1425.299763] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.307698] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865825, 'name': CreateVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.809832] env[69982]: DEBUG oslo_vmware.api [-] Task: {'id': task-3865825, 'name': CreateVM_Task, 'duration_secs': 0.296263} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.811081] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Created VM on the ESX host {{(pid=69982) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1425.811081] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.811081] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1425.811483] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1425.811736] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e0320d1-2586-470e-909c-17306038f1f7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.816031] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 
tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1425.816031] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5267277c-7873-9c8d-b8bb-5d0f77604a74" [ 1425.816031] env[69982]: _type = "Task" [ 1425.816031] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.823810] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5267277c-7873-9c8d-b8bb-5d0f77604a74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.834361] env[69982]: INFO nova.compute.manager [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Rebuilding instance [ 1425.873136] env[69982]: DEBUG nova.compute.manager [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1425.874075] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a112529-b7d7-4c1b-acae-c5d091e4784d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.913110] env[69982]: DEBUG nova.compute.manager [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1425.913313] env[69982]: DEBUG nova.compute.manager [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing instance network info cache due to event network-changed-7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1425.913548] env[69982]: DEBUG oslo_concurrency.lockutils [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] Acquiring lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.913734] env[69982]: DEBUG oslo_concurrency.lockutils [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] Acquired lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1425.913971] env[69982]: DEBUG nova.network.neutron [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Refreshing network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2071}} [ 1426.327356] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.327757] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Processing image 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1426.327913] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.328030] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1426.328218] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.328482] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6ad0287-58a6-4af6-8dc2-1078fbaaa4f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.347183] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.347380] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69982) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1426.348143] env[69982]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de6f917b-aa30-4dae-9307-91ef067f2a13 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.353486] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1426.353486] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52492477-1451-cf5a-cc3c-b4d84da9edfe" [ 1426.353486] env[69982]: _type = "Task" [ 1426.353486] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.361520] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': session[5292cb0b-d4ae-0ba2-75de-b718480ca068]52492477-1451-cf5a-cc3c-b4d84da9edfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.686864] env[69982]: DEBUG nova.network.neutron [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updated VIF entry in instance network info cache for port 7e64b942-be80-4f11-8eaa-9e4f6219c94d. 
{{(pid=69982) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3563}} [ 1426.687265] env[69982]: DEBUG nova.network.neutron [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [{"id": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "address": "fa:16:3e:e6:20:5d", "network": {"id": "7706fbc1-c30a-4e06-b55a-a1c4a23bd8ea", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1589705535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.167", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7372e00e1966430da6131e02f199ba14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e64b942-be", "ovs_interfaceid": "7e64b942-be80-4f11-8eaa-9e4f6219c94d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.863885] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Preparing fetch location {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1426.864202] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Fetch image to [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45/OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45.vmdk {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1426.864411] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Downloading stream optimized image 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 to [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45/OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45.vmdk on the data store datastore2 as vApp {{(pid=69982) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1426.864596] env[69982]: DEBUG nova.virt.vmwareapi.images [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Downloading image file data 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 to the ESX as VM named 'OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45' {{(pid=69982) 
fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1426.889251] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1426.906089] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-839e84c1-47c6-4282-ae8f-849c5804365b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.914794] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1426.914794] env[69982]: value = "task-3865826" [ 1426.914794] env[69982]: _type = "Task" [ 1426.914794] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.926362] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865826, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.947416] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1426.947416] env[69982]: value = "resgroup-9" [ 1426.947416] env[69982]: _type = "ResourcePool" [ 1426.947416] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1426.947635] env[69982]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3d48e8cd-a53f-4bed-829d-413a0769a20f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.969643] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease: (returnval){ [ 1426.969643] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217c25c-1bb3-9346-7471-8ce55fe3cfaf" [ 1426.969643] env[69982]: _type = "HttpNfcLease" [ 1426.969643] env[69982]: } obtained for vApp import into resource pool (val){ [ 1426.969643] env[69982]: value = "resgroup-9" [ 1426.969643] env[69982]: _type = "ResourcePool" [ 1426.969643] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1426.969951] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the lease: (returnval){ [ 1426.969951] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217c25c-1bb3-9346-7471-8ce55fe3cfaf" [ 1426.969951] env[69982]: _type = "HttpNfcLease" [ 1426.969951] env[69982]: } to be ready. 
{{(pid=69982) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1426.979213] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1426.979213] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217c25c-1bb3-9346-7471-8ce55fe3cfaf" [ 1426.979213] env[69982]: _type = "HttpNfcLease" [ 1426.979213] env[69982]: } is initializing. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1427.193329] env[69982]: DEBUG oslo_concurrency.lockutils [req-89959217-291e-45a8-8e6c-77f10e3c972a req-6a3e44d7-9d03-45b4-b3ce-917df0dd9b81 service nova] Releasing lock "refresh_cache-86b67d38-95b6-46cf-bf7c-524a06773c0c" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1427.279783] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.424717] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865826, 'name': PowerOffVM_Task, 'duration_secs': 0.195999} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.425101] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1427.425761] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1427.426038] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb351a48-d387-41e1-bfd4-9bec20b7f8e4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.433026] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1427.433026] env[69982]: value = "task-3865829" [ 1427.433026] env[69982]: _type = "Task" [ 1427.433026] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.440752] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865829, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.478028] env[69982]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1427.478028] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217c25c-1bb3-9346-7471-8ce55fe3cfaf" [ 1427.478028] env[69982]: _type = "HttpNfcLease" [ 1427.478028] env[69982]: } is ready. {{(pid=69982) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1427.478323] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1427.478323] env[69982]: value = "session[5292cb0b-d4ae-0ba2-75de-b718480ca068]5217c25c-1bb3-9346-7471-8ce55fe3cfaf" [ 1427.478323] env[69982]: _type = "HttpNfcLease" [ 1427.478323] env[69982]: }. {{(pid=69982) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1427.479065] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5df58-956f-4802-9d14-62dcbcb39a23 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.486535] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk from lease info. {{(pid=69982) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1427.486709] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk. {{(pid=69982) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1427.551422] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c27a58a9-e942-422b-9b76-2ea0b9637fd8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.947978] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] VM already powered off {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1427.948221] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Volume detach. 
Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1427.948475] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768135', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'name': 'volume-40c0390a-c463-4307-82f3-688f52cae521', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '902f02d7-6852-46fc-ace9-8abb0f36ee9e', 'attached_at': '', 'detached_at': '', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'serial': '40c0390a-c463-4307-82f3-688f52cae521'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1427.949375] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d845dd5-199a-4a9c-a579-555ef793f77e {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.977023] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16376450-adc8-4560-afb8-dbd88bfc0063 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.987531] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d10637a-8c2f-4ff4-b1d9-55ddbbfb5b0f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.013501] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da475c6-937c-462b-8974-8f7cec053eb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.029676] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] The volume has not been displaced from its original location: [datastore1] volume-40c0390a-c463-4307-82f3-688f52cae521/volume-40c0390a-c463-4307-82f3-688f52cae521.vmdk. No consolidation needed. 
{{(pid=69982) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1428.035342] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1428.039975] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3090fa93-0a6f-4aa0-af33-ddcbae1a784c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.059547] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1428.059547] env[69982]: value = "task-3865830" [ 1428.059547] env[69982]: _type = "Task" [ 1428.059547] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.070207] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865830, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.285990] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1428.571011] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865830, 'name': ReconfigVM_Task, 'duration_secs': 0.223942} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.572788] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=69982) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1428.577810] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-257a4253-00de-4ef3-b0f4-f5b188cedd0a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.596668] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1428.596668] env[69982]: value = "task-3865831" [ 1428.596668] env[69982]: _type = "Task" [ 1428.596668] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.606644] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865831, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.682826] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Completed reading data from the image iterator. {{(pid=69982) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1428.683147] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1428.684115] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f6c5f5-ee4e-440d-bbfc-c3742730db40 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.690836] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk is in state: ready. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1428.691063] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk. {{(pid=69982) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1428.691360] env[69982]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-b656d376-6cb2-4962-8978-0b4767c0664b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.915034] env[69982]: DEBUG oslo_vmware.rw_handles [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f16c69-49c7-2d9d-f409-053b8c4f57cb/disk-0.vmdk. 
{{(pid=69982) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1428.915284] env[69982]: INFO nova.virt.vmwareapi.images [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Downloaded image file data 5ad07c95-ab1e-4c9f-9a71-09dea5249e71 [ 1428.916212] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c1ae07-edb3-4ad3-abe0-7af6725ffbb4 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.935420] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51b33215-57db-40fe-80a8-7c42b6674a5d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.961722] env[69982]: INFO nova.virt.vmwareapi.images [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] The imported VM was unregistered [ 1428.964348] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Caching image {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1428.964615] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1428.964919] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d4b5a0f4-7d8e-4e6b-b1f2-1bc7c83c63a6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.976504] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Created directory with path [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71 {{(pid=69982) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1428.976632] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45/OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45.vmdk to [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk. 
{{(pid=69982) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1428.976906] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-63789945-b4d9-4f72-8db2-55a805f2b185 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.984325] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1428.984325] env[69982]: value = "task-3865833" [ 1428.984325] env[69982]: _type = "Task" [ 1428.984325] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.993023] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.106627] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865831, 'name': ReconfigVM_Task, 'duration_secs': 0.121847} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.106951] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-768135', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'name': 'volume-40c0390a-c463-4307-82f3-688f52cae521', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '902f02d7-6852-46fc-ace9-8abb0f36ee9e', 'attached_at': '', 'detached_at': '', 'volume_id': '40c0390a-c463-4307-82f3-688f52cae521', 'serial': '40c0390a-c463-4307-82f3-688f52cae521'} {{(pid=69982) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1429.107249] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1429.108089] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eefa81a6-da52-4805-a6ac-8a9372c9b497 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.116563] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1429.116800] env[69982]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.UnregisterVM with opID=oslo.vmware-28d085fc-6bae-4c16-b1c2-ce90ff15df92 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.189865] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1429.189865] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Deleting contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1429.190167] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Deleting the datastore file [datastore1] 902f02d7-6852-46fc-ace9-8abb0f36ee9e {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1429.190308] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99fca1ac-351a-49a7-8d3a-026aeb4ebba6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.198711] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for the task: (returnval){ [ 1429.198711] env[69982]: value = "task-3865835" [ 1429.198711] env[69982]: _type = "Task" [ 1429.198711] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.209106] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865835, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.284919] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.495430] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.710698] env[69982]: DEBUG oslo_vmware.api [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Task: {'id': task-3865835, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189431} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.711090] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.711157] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Deleted contents of the VM from datastore datastore1 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.711335] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.769516] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Volume detach. Driver type: vmdk {{(pid=69982) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1429.769938] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbb798c0-03a9-409a-9ab3-f55ee4b21b0b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.781150] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-863828b5-37f4-4b74-94bf-85bebdac7703 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.808453] env[69982]: ERROR nova.compute.manager [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Failed to detach volume 40c0390a-c463-4307-82f3-688f52cae521 from /dev/sda: nova.exception.InstanceNotFound: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. 
[ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Traceback (most recent call last): [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self.driver.rebuild(**kwargs) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise NotImplementedError() [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] NotImplementedError [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] During handling of the above exception, another exception occurred: [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Traceback (most recent call last): [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self.driver.detach_volume(context, old_connection_info, [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] return self._volumeops.detach_volume(connection_info, instance) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._detach_volume_vmdk(connection_info, instance) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] stable_ref.fetch_moref(session) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] 
nova.exception.InstanceNotFound: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. [ 1429.808453] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.986414] env[69982]: DEBUG nova.compute.utils [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Build of instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e aborted: Failed to rebuild volume backed instance. {{(pid=69982) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1429.992409] env[69982]: ERROR nova.compute.manager [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e aborted: Failed to rebuild volume backed instance. [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Traceback (most recent call last): [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self.driver.rebuild(**kwargs) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise NotImplementedError() [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] NotImplementedError [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] During handling of the above exception, another exception occurred: [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Traceback (most recent call last): [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._detach_root_volume(context, instance, root_bdm) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] with excutils.save_and_reraise_exception(): [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self.force_reraise() [ 1429.992409] env[69982]: ERROR nova.compute.manager 
[instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise self.value [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self.driver.detach_volume(context, old_connection_info, [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] return self._volumeops.detach_volume(connection_info, instance) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._detach_volume_vmdk(connection_info, instance) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] stable_ref.fetch_moref(session) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] nova.exception.InstanceNotFound: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. 
[ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] During handling of the above exception, another exception occurred: [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Traceback (most recent call last): [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] yield [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1429.992409] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._do_rebuild_instance_with_claim( [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._do_rebuild_instance( [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._rebuild_default_impl(**kwargs) [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] self._rebuild_volume_backed_instance( [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] raise exception.BuildAbortException( [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] nova.exception.BuildAbortException: Build of instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e aborted: Failed to rebuild volume backed instance. [ 1429.993545] env[69982]: ERROR nova.compute.manager [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] [ 1430.001521] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.284644] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.501388] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.001779] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.284127] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.284331] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1431.284554] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.500068] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865833, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.501852} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.500358] env[69982]: INFO nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45/OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45.vmdk to [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk. 
[ 1431.500561] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Cleaning up location [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45 {{(pid=69982) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1431.500730] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_7af3a80f-b770-4bda-b3ca-351ca2a0fe45 {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.501016] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01427ab6-aa24-476f-ae05-f3b768b94d82 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.510718] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1431.510718] env[69982]: value = "task-3865836" [ 1431.510718] env[69982]: _type = "Task" [ 1431.510718] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.520544] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865836, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.792031] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.792319] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.792524] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.792687] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1431.793698] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fc5b24-8c2c-447e-9694-1ab72bb5d786 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.802921] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cee7712-14da-4aab-ad68-12cf96273aec {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.819290] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0572f23b-2a26-4d40-8458-18903a7e542b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.827388] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab60c28-cdcb-435c-93cc-1dd57e41c913 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.858616] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180404MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1431.858799] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.858989] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.012625] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.023357] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043091} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.023634] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1432.023821] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk" {{(pid=69982) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1432.024085] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk to [datastore2] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1432.024352] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9636d17-e1ca-45fa-a92e-5909e4dda8c8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.031582] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1432.031582] env[69982]: value = "task-3865837" [ 1432.031582] env[69982]: _type = "Task" [ 1432.031582] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.040952] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.542847] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.889213] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Instance 86b67d38-95b6-46cf-bf7c-524a06773c0c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69982) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1432.889486] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1432.889605] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1432.920627] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9641ee-68ce-4abe-ab2f-c3e650dbbc5c {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.929841] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326fe2ca-9d97-4331-b4bb-27d1767183c9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.934325] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.934955] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.935217] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.935621] env[69982]: DEBUG oslo_concurrency.lockutils [None
req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.935913] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.967475] env[69982]: INFO nova.compute.manager [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Terminating instance [ 1432.970038] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f648783-85ae-4365-8256-fcd35fdfda00 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.982646] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08fa3373-1da6-45f9-95a0-702cbcd15552 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.001488] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.043556] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 26%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.476681] env[69982]: DEBUG nova.compute.manager [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Start destroying the instance on the hypervisor.
{{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1433.477097] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ec629d3-5dfb-4385-98f2-4446cf910be6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.488509] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bcb130-4cfe-49b4-a21a-4d8ab47d61c1 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.505114] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1433.517714] env[69982]: WARNING nova.virt.vmwareapi.driver [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. [ 1433.517917] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1433.518347] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5699fef-6df7-40f1-a4f0-dde442273c72 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.529754] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea9a875-752c-460d-a2ef-91c2e5264e58 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.553607] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.561512] env[69982]: WARNING nova.virt.vmwareapi.vmops [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. 
[ 1433.561767] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1433.561975] env[69982]: INFO nova.compute.manager [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1433.562259] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1433.562933] env[69982]: DEBUG nova.compute.manager [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1433.563100] env[69982]: DEBUG nova.network.neutron [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1434.010282] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1434.010526] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.151s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.010792] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.998s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.051324] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 69%.
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.056708] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93095f59-1a8f-4974-9a53-34ae7de571b5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.066814] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a346989e-7d8c-4886-b12f-8c940b0a4267 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.103082] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec551073-25f5-4f4a-a2e2-6227c8f39646 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.108182] env[69982]: DEBUG nova.compute.manager [req-3a94e66e-fdd9-4e91-a60d-59d8e9c700bf req-898351cf-7a97-4ad9-a109-8b1c0b87a62c service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Received event network-vif-deleted-1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1434.108400] env[69982]: INFO nova.compute.manager [req-3a94e66e-fdd9-4e91-a60d-59d8e9c700bf req-898351cf-7a97-4ad9-a109-8b1c0b87a62c service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Neutron deleted interface 1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d; detaching it from the instance and deleting it from the info cache [ 1434.108596] env[69982]: DEBUG nova.network.neutron [req-3a94e66e-fdd9-4e91-a60d-59d8e9c700bf req-898351cf-7a97-4ad9-a109-8b1c0b87a62c service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.116867] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a01e53-1687-41cb-a56f-e3b315aa0157 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.133517] env[69982]: DEBUG nova.compute.provider_tree [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1434.556269] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task} progress is 85%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.607140] env[69982]: DEBUG nova.network.neutron [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.613239] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6fad11f4-8037-4dd9-90f9-2fded18bf2f5 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.630459] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf5a83f-d508-40c0-848d-52ff8a1968f2 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.645828] env[69982]: DEBUG nova.scheduler.client.report [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1434.647591] env[69982]: DEBUG oslo_concurrency.lockutils [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.637s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.647843] env[69982]: INFO nova.compute.manager [None req-d48e33bb-ed4b-4084-b4eb-2c54eeaaa4f5 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Successfully reverted task state from rebuilding on failure for instance. [ 1434.672718] env[69982]: DEBUG nova.compute.manager [req-3a94e66e-fdd9-4e91-a60d-59d8e9c700bf req-898351cf-7a97-4ad9-a109-8b1c0b87a62c service nova] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Detach interface failed, port_id=1fb08d3e-9e48-4e27-bf75-bcc1f28bd96d, reason: Instance 902f02d7-6852-46fc-ace9-8abb0f36ee9e could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1435.055459] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865837, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.899487} completed successfully. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.055967] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5ad07c95-ab1e-4c9f-9a71-09dea5249e71/5ad07c95-ab1e-4c9f-9a71-09dea5249e71.vmdk to [datastore2] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk {{(pid=69982) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1435.056705] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c11e4b-a1f4-44d0-a079-bcca540d237b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.081282] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1435.082157] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84a43201-f3ba-4fd6-a521-4f80fc64341f {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.104955] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1435.104955] env[69982]: value = "task-3865838" [ 1435.104955] env[69982]: _type = "Task" [ 1435.104955] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.115107] env[69982]: INFO nova.compute.manager [-] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Took 1.55 seconds to deallocate network for instance. [ 1435.115270] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865838, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.616994] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865838, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.671596] env[69982]: INFO nova.compute.manager [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Took 0.56 seconds to detach 1 volumes for instance. 
[ 1435.674131] env[69982]: DEBUG nova.compute.manager [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Deleting volume: 40c0390a-c463-4307-82f3-688f52cae521 {{(pid=69982) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1436.117819] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865838, 'name': ReconfigVM_Task, 'duration_secs': 1.003739} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.118166] env[69982]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 86b67d38-95b6-46cf-bf7c-524a06773c0c/86b67d38-95b6-46cf-bf7c-524a06773c0c.vmdk or device None with type streamOptimized {{(pid=69982) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1436.119088] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f533f0d1-a868-476c-8e67-696033b066c7 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.133887] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1436.133887] env[69982]: value = "task-3865840" [ 1436.133887] env[69982]: _type = "Task" [ 1436.133887] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.143311] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865840, 'name': Rename_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.236329] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1436.236701] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1436.236962] env[69982]: DEBUG nova.objects.instance [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lazy-loading 'resources' on Instance uuid 902f02d7-6852-46fc-ace9-8abb0f36ee9e {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1436.644315] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865840, 'name': Rename_Task, 'duration_secs': 0.41889} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.644596] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powering on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1436.644864] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a706d328-fb78-4700-a2e3-0aa8759070e9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.652204] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1436.652204] env[69982]: value = "task-3865841" [ 1436.652204] env[69982]: _type = "Task" [ 1436.652204] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.660743] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.778313] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0543ed65-880e-4902-b915-2b080b5b9f29 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.786957] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0eeba6e-7326-4940-a6fe-f29d856a0191 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.599590] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19339aa3-212f-4e3e-bb8d-334b93070ac6 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.609100] env[69982]: DEBUG oslo_vmware.api [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865841, 'name': PowerOnVM_Task, 'duration_secs': 0.473023} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.611585] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powered on the VM {{(pid=69982) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1437.615123] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ad10cf-1ae3-423d-ad73-f82e00e80dd3 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.630543] env[69982]: DEBUG nova.compute.provider_tree [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1437.721649] env[69982]: DEBUG nova.compute.manager [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Checking state {{(pid=69982) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1437.722571] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b86e8d-ca9f-4fec-9f35-981d8d9c4422 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.134578] env[69982]: DEBUG nova.scheduler.client.report [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1438.240327] env[69982]: DEBUG oslo_concurrency.lockutils [None req-8e8a9a51-991e-42ac-8466-9b5d16452ed1 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.006s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.639437] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.403s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.009315] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.161062] env[69982]: DEBUG oslo_concurrency.lockutils [None req-eef14027-a1b3-4c76-a43f-dfa36e0947c1 tempest-ServerActionsV293TestJSON-1625655290 tempest-ServerActionsV293TestJSON-1625655290-project-member] Lock "902f02d7-6852-46fc-ace9-8abb0f36ee9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.226s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1474.137365] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1474.137805] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1474.137865] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1474.138083] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1474.138260] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1474.142128] env[69982]: INFO nova.compute.manager [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Terminating instance [ 1474.646330] env[69982]: DEBUG nova.compute.manager [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Start destroying the instance on the hypervisor. {{(pid=69982) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1474.646669] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Destroying instance {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1474.647647] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecd3b2b-b22e-4e7e-945f-ce372ea7ec01 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.655719] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powering off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.656014] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f67e9ec-d42e-4a59-928b-8d644a954dfa {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.663172] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1474.663172] env[69982]: value = "task-3865842" [ 1474.663172] env[69982]: _type = "Task" [ 1474.663172] env[69982]: } to complete. {{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.674706] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865842, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.173804] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865842, 'name': PowerOffVM_Task, 'duration_secs': 0.219542} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.174195] env[69982]: DEBUG nova.virt.vmwareapi.vm_util [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Powered off the VM {{(pid=69982) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1475.174195] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Unregistering the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1475.174431] env[69982]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23136393-ad71-478e-ad9a-74a25782ae6d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.241740] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Unregistered the VM {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1475.242099] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleting contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1475.242304] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleting the datastore file [datastore2] 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1475.242574] env[69982]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-488514c6-e886-468b-89b7-8bd63c8dcce9 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.250082] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for the task: (returnval){ [ 1475.250082] env[69982]: value = "task-3865844" [ 1475.250082] env[69982]: _type = "Task" [ 1475.250082] env[69982]: } to complete. 
{{(pid=69982) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.258748] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.760731] env[69982]: DEBUG oslo_vmware.api [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Task: {'id': task-3865844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135868} completed successfully. {{(pid=69982) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.761027] env[69982]: DEBUG nova.virt.vmwareapi.ds_util [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted the datastore file {{(pid=69982) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1475.761202] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deleted contents of the VM from datastore datastore2 {{(pid=69982) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1475.761376] env[69982]: DEBUG nova.virt.vmwareapi.vmops [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance destroyed {{(pid=69982) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1475.761538] env[69982]: INFO nova.compute.manager [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1475.761793] env[69982]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69982) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1475.761980] env[69982]: DEBUG nova.compute.manager [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Deallocating network for instance {{(pid=69982) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1475.762090] env[69982]: DEBUG nova.network.neutron [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] deallocate_for_instance() {{(pid=69982) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1867}} [ 1476.207611] env[69982]: DEBUG nova.compute.manager [req-6742865d-2f2d-4863-bdc4-d689ba5de3e0 req-d037f092-a6ae-4eca-8255-861ad832d462 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Received event network-vif-deleted-7e64b942-be80-4f11-8eaa-9e4f6219c94d {{(pid=69982) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1476.207972] env[69982]: INFO nova.compute.manager [req-6742865d-2f2d-4863-bdc4-d689ba5de3e0 req-d037f092-a6ae-4eca-8255-861ad832d462 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Neutron deleted interface 7e64b942-be80-4f11-8eaa-9e4f6219c94d; detaching it from the instance and deleting it from the info cache [ 1476.207972] env[69982]: DEBUG nova.network.neutron [req-6742865d-2f2d-4863-bdc4-d689ba5de3e0 req-d037f092-a6ae-4eca-8255-861ad832d462 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.690910] env[69982]: DEBUG nova.network.neutron [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Updating instance_info_cache with network_info: [] {{(pid=69982) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.711056] env[69982]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc061fda-d233-434c-a7b0-bbdd3b31fdb0 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.720932] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83679133-ea9d-4f7f-b4c5-952a4f2bfe22 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.748832] env[69982]: DEBUG nova.compute.manager [req-6742865d-2f2d-4863-bdc4-d689ba5de3e0 req-d037f092-a6ae-4eca-8255-861ad832d462 service nova] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Detach interface failed, port_id=7e64b942-be80-4f11-8eaa-9e4f6219c94d, reason: Instance 86b67d38-95b6-46cf-bf7c-524a06773c0c could not be found. {{(pid=69982) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1477.194108] env[69982]: INFO nova.compute.manager [-] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Took 1.43 seconds to deallocate network for instance. 
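The recurring 'Acquiring lock "compute_resources" ... acquired ... "released"' lines are emitted by oslo.concurrency's lockutils wrapper around the resource tracker's critical sections. A rough, self-contained illustration of the decorator that produces this logging follows; the function name and body are illustrative, not Nova's actual code.

```python
# Illustrative only: oslo.concurrency's synchronized decorator serializes
# callers on a named semaphore and logs the acquire/held/released timings
# seen in the "compute_resources" lock lines above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Concurrent callers block here; each waiter logs
    # 'acquired ... :: waited N.NNNs' once it gets the semaphore and
    # '"released" ... :: held N.NNNs' when this function returns.
    print('updating usage for', instance_uuid)

update_usage('86b67d38-95b6-46cf-bf7c-524a06773c0c')
```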
[ 1477.700427] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1477.700819] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1477.700952] env[69982]: DEBUG nova.objects.instance [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lazy-loading 'resources' on Instance uuid 86b67d38-95b6-46cf-bf7c-524a06773c0c {{(pid=69982) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1478.238246] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9bef518-46bb-4878-a23b-97364a63b2bd {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.246286] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eded623-474f-4dc9-ba0f-302237d98457 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.276466] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9306c8dd-a2ac-4a27-a6e7-47260ee55f9a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.284024] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e732afe-71da-43f2-b45c-cc8a56fb870a {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.298699] env[69982]: DEBUG nova.compute.provider_tree [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1478.802242] env[69982]: DEBUG nova.scheduler.client.report [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1479.308050] env[69982]: DEBUG oslo_concurrency.lockutils [None 
req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.607s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1479.327509] env[69982]: INFO nova.scheduler.client.report [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Deleted allocations for instance 86b67d38-95b6-46cf-bf7c-524a06773c0c [ 1479.836048] env[69982]: DEBUG oslo_concurrency.lockutils [None req-44035850-5a4c-4722-9b70-0de1591ee4e2 tempest-AttachVolumeShelveTestJSON-1631444546 tempest-AttachVolumeShelveTestJSON-1631444546-project-member] Lock "86b67d38-95b6-46cf-bf7c-524a06773c0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.698s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.285758] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.286180] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances with incomplete migration {{(pid=69982) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1485.787618] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1485.788093] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.285431] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1487.285857] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Cleaning up deleted instances {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1487.797967] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] There are 24 instances to clean {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1487.798237] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 902f02d7-6852-46fc-ace9-8abb0f36ee9e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1488.302430] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 842ca18a-d33b-4658-9d78-e51b5de0ea68] Instance has had 0 of 5 cleanup attempts {{(pid=69982) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1488.806098] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 86b67d38-95b6-46cf-bf7c-524a06773c0c] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1489.309382] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8f0dacd5-59ec-495e-bac7-df2b76883562] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1489.812949] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 889ff9c8-08a2-4249-ae5b-bc94bc16dc8c] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1490.316549] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 2cbd6771-48dd-44bc-a0e3-96dad0a7aead] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1490.820046] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: e778d67f-13de-4a50-9c46-137bdbfd4ddf] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1491.323593] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8a2c1c8c-a2a8-482b-9004-41971ed2b493] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1491.826865] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ffb343e2-b631-4ff8-9da4-e07462d126c7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1492.330930] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6c20ba60-6552-47f3-8eb3-a71923a0a68f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1492.834954] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: cb226b65-d91f-4216-9844-37c22d3705a7] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1493.339744] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 4f7e2fe2-7f5e-46a4-b11c-52ad18e7ba7d] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1493.842967] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 7d84344b-cc66-4d9a-b5b4-4fd26a75648e] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1494.346650] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 7609d0f6-6b9d-4a1d-b64f-f95c33c5a38f] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1494.850033] env[69982]: DEBUG nova.compute.manager [None 
req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 6341394e-2ea2-4d77-b818-6d3bf5a32e97] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1495.353654] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 54b91e61-1302-40e6-b928-fcca31cd9b3a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1495.857589] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 36b7f89e-7552-40b9-ada4-01abfcea8310] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1496.360924] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: ce24e165-230a-44bb-ae46-d1479e71585a] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1496.865467] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 191d4433-cae3-48af-9c83-fa67499ad49c] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1497.369388] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 9815a4f9-3827-4e83-b897-18edadcac55b] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1497.873275] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 8089e191-85df-46cd-8a6b-415bfd5d6748] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1498.377179] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 92f898e2-0dfd-45ed-b74b-958f6c5af844] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1498.880631] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: b5ad55cc-9010-46be-bfd0-28fa1607f1c3] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1499.384668] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] [instance: 31f56d0e-7c64-4fe3-917e-7ebb814ae924] Instance has had 0 of 5 cleanup attempts {{(pid=69982) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1499.888784] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386128] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386522] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task 
ComputeManager._poll_rebooting_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386522] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386676] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386834] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.386975] env[69982]: DEBUG nova.compute.manager [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69982) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1502.387206] env[69982]: DEBUG oslo_service.periodic_task [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69982) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1502.890757] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.891038] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1502.891202] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1502.891361] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69982) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1502.892320] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd1de82-11fa-4bff-8ed1-e7880e8515ae {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.900915] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960a14cc-aa2a-4533-a6dd-b97212c1cb43 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.915135] env[69982]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b02be3-e359-47db-a4a4-2f60da82e62d {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.921772] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f88191a-febb-45b1-8a16-23ed7f368e1b {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.950154] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180711MB free_disk=46GB free_vcpus=48 pci_devices=None {{(pid=69982) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1502.950376] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1502.950534] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1504.088128] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1504.088383] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69982) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1504.101794] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1fa4fb-6124-4c3b-9c96-7d90119e2237 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.109996] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c8640-958a-4b5a-a0d2-15f4f29650df {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.140424] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a78864a3-1ba4-47e5-9254-d13cf7e8f397 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.148377] env[69982]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c471c10-3bfb-4a23-ac55-e23949f5bbb8 {{(pid=69982) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.161924] env[69982]: DEBUG nova.compute.provider_tree [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed in ProviderTree for provider: 206a5498-2e79-46c1-a636-9488a05fb67d {{(pid=69982) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1504.665032] env[69982]: DEBUG nova.scheduler.client.report [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Inventory has not changed for provider 206a5498-2e79-46c1-a636-9488a05fb67d based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 46, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69982) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1505.169887] env[69982]: DEBUG nova.compute.resource_tracker [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69982) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1505.170281] env[69982]: DEBUG oslo_concurrency.lockutils [None req-04eed6e6-7583-4afb-9dab-df2650b6cc34 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.220s {{(pid=69982) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
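For reference, the inventory payload the report client repeatedly compares against placement for provider 206a5498-2e79-46c1-a636-9488a05fb67d can be written out as a plain dict, and the capacity placement schedules against is derived as (total - reserved) * allocation_ratio. The sketch below copies the values from the DEBUG lines above; the loop is only a worked illustration of that formula.

```python
# Inventory for provider 206a5498-2e79-46c1-a636-9488a05fb67d, copied from
# the "Inventory has not changed ..." DEBUG lines above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                  'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                  'max_unit': 46,    'step_size': 1, 'allocation_ratio': 1.0},
}

# Schedulable capacity as placement computes it: (total - reserved) * ratio.
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```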